index int64 0 0 | repo_id stringlengths 9 205 | file_path stringlengths 31 246 | content stringlengths 1 12.2M | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/datasketches-java/src/main/java/org/apache/datasketches | Create_ds/datasketches-java/src/main/java/org/apache/datasketches/quantiles/DirectDoublesSketchAccessor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.quantiles;
import java.util.Arrays;
import org.apache.datasketches.memory.WritableMemory;
/**
 * Read/write accessor for the items of a DoublesSketch whose data lives in Memory.
 *
 * @author Jon Malkin
 */
class DirectDoublesSketchAccessor extends DoublesSketchAccessor {

  DirectDoublesSketchAccessor(final DoublesSketch ds,
                              final boolean forceSize,
                              final int level) {
    super(ds, forceSize, level);
    assert ds.hasMemory(); // this accessor only wraps Memory-backed sketches
  }

  @Override
  DoublesSketchAccessor copyAndSetLevel(final int level) {
    // same sketch and sizing policy, retargeted at a different level
    return new DirectDoublesSketchAccessor(ds_, forceSize_, level);
  }

  @Override
  double get(final int index) {
    assert index >= 0 && index < numItems_;
    assert n_ == ds_.getN();
    return ds_.getMemory().getDouble(offset_ + (index * Double.BYTES));
  }

  @Override
  double set(final int index, final double quantile) {
    assert index >= 0 && index < numItems_;
    assert n_ == ds_.getN();
    assert !ds_.isCompact(); // can't write to a compact sketch
    final int byteOffset = offset_ + (index * Double.BYTES);
    final WritableMemory wmem = ds_.getMemory();
    final double previous = wmem.getDouble(byteOffset);
    wmem.putDouble(byteOffset, quantile);
    return previous; // hand back the item that was displaced
  }

  @Override
  double[] getArray(final int fromIdx, final int numItems) {
    final double[] out = new double[numItems];
    final int byteOffset = offset_ + (fromIdx * Double.BYTES);
    ds_.getMemory().getDoubleArray(byteOffset, out, 0, numItems);
    return out;
  }

  @Override
  void putArray(final double[] srcArray, final int srcIndex,
                final int dstIndex, final int numItems) {
    assert !ds_.isCompact(); // can't write to compact sketch
    final int byteOffset = offset_ + (dstIndex * Double.BYTES);
    ds_.getMemory().putDoubleArray(byteOffset, srcArray, srcIndex, numItems);
  }

  @Override
  void sort() {
    assert currLvl_ == BB_LVL_IDX; // only the base buffer is ever unsorted
    // stage the items on-heap, sort, then write them back
    final double[] scratch = new double[numItems_];
    final WritableMemory wmem = ds_.getMemory();
    wmem.getDoubleArray(offset_, scratch, 0, numItems_);
    Arrays.sort(scratch, 0, numItems_);
    wmem.putDoubleArray(offset_, scratch, 0, numItems_);
  }
}
| 2,800 |
0 | Create_ds/datasketches-java/src/main/java/org/apache/datasketches | Create_ds/datasketches-java/src/main/java/org/apache/datasketches/quantiles/ItemsUpdateImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.quantiles;
import java.util.Arrays;
import java.util.Comparator;
/**
 * Static helpers implementing the update and carry-propagation logic for ItemsSketch.
 */
final class ItemsUpdateImpl {

  private ItemsUpdateImpl() {} // static-only utility; no instances

  /**
   * Grows the combined buffer of the given sketch, if needed, to accommodate newN items.
   * This only increases the size and does not touch or move any data.
   * Important: newN might not equal the sketch's current n_.
   *
   * @param <T> the item type
   * @param sketch the target sketch whose combined buffer may be grown
   * @param newN the anticipated total item count
   */
  static <T> void maybeGrowLevels(final ItemsSketch<T> sketch, final long newN) {
    // important: newN might not equal n_
    final int k = sketch.getK();
    final int numLevelsNeeded = ClassicUtil.computeNumLevelsNeeded(k, newN);
    if (numLevelsNeeded == 0) {
      // don't need any levels yet, and might have small base buffer; this can happen during a merge
      return;
    }
    // from here on we need a full-size base buffer and at least one level
    assert newN >= 2L * k;
    assert numLevelsNeeded > 0;
    final int spaceNeeded = (2 + numLevelsNeeded) * k;
    if (spaceNeeded <= sketch.getCombinedBufferAllocatedCount()) {
      return; // already large enough
    }
    // copies base buffer plus old levels
    sketch.combinedBuffer_ = Arrays.copyOf(sketch.getCombinedBuffer(), spaceNeeded);
    sketch.combinedBufferItemCapacity_ = spaceNeeded;
  }

  /**
   * Propagates a carry upward from startingLevel through the level structure of the sketch,
   * merging and down-sampling (zipping) full levels until the first empty level is reached.
   *
   * @param <T> the item type
   * @param startingLevel the level at which propagation begins
   * @param sizeKBuf a size-K scratch buffer (may be null when doUpdateVersion is true)
   * @param sizeKStart starting offset into sizeKBuf
   * @param size2KBuf a size-2K scratch buffer
   * @param size2KStart starting offset into size2KBuf
   * @param doUpdateVersion true for the update-path computation, false for the merge-into path
   * @param sketch the target sketch
   */
  @SuppressWarnings("unchecked")
  static <T> void inPlacePropagateCarry(
      final int startingLevel,
      final T[] sizeKBuf, final int sizeKStart,
      final T[] size2KBuf, final int size2KStart,
      final boolean doUpdateVersion,
      final ItemsSketch<T> sketch) { // else doMergeIntoVersion
    final Object[] levelsArr = sketch.getCombinedBuffer();
    final long bitPattern = sketch.getBitPattern();
    final int k = sketch.getK();
    // the first empty level absorbs the carry
    final int endingLevel = ClassicUtil.lowestZeroBitStartingAt(bitPattern, startingLevel);
    if (doUpdateVersion) { // update version of computation
      // it is okay for sizeKBuf to be null in this case
      zipSize2KBuffer(
          size2KBuf, size2KStart,
          levelsArr, (2 + endingLevel) * k,
          k);
    } else { // mergeInto version of computation
      System.arraycopy(
          sizeKBuf, sizeKStart,
          levelsArr, (2 + endingLevel) * k,
          k);
    }
    for (int lvl = startingLevel; lvl < endingLevel; lvl++) {
      assert (bitPattern & (1L << lvl)) > 0; // internal consistency check
      // merge the current level with the accumulating carry, then down-sample back to size K
      mergeTwoSizeKBuffers(
          (T[]) levelsArr, (2 + lvl) * k,
          (T[]) levelsArr, (2 + endingLevel) * k,
          size2KBuf, size2KStart,
          k, sketch.getComparator());
      zipSize2KBuffer(
          size2KBuf, size2KStart,
          levelsArr, (2 + endingLevel) * k,
          k);
      // to release the discarded objects
      Arrays.fill(levelsArr, (2 + lvl) * k, (2 + lvl + 1) * k, null);
    } // end of loop over lower levels
    // update bit pattern with binary-arithmetic ripple carry
    sketch.bitPattern_ = bitPattern + (1L << startingLevel);
  }

  /**
   * Down-samples a size-2K buffer into a size-K buffer by keeping every other item,
   * starting at a random parity. Note: this version refers to the ItemsSketch.rand.
   *
   * @param bufA the size-2K input buffer
   * @param startA starting offset into bufA
   * @param bufC the size-K output buffer
   * @param startC starting offset into bufC
   * @param k the sketch parameter k
   */
  private static void zipSize2KBuffer(
      final Object[] bufA, final int startA, // input
      final Object[] bufC, final int startC, // output
      final int k) {
    final int randomOffset = ItemsSketch.rand.nextBoolean() ? 1 : 0;
    final int limC = startC + k;
    for (int a = startA + randomOffset, c = startC; c < limC; a += 2, c++) {
      bufC[c] = bufA[a];
    }
  }

  /**
   * Merges two sorted size-K runs into a sorted size-2K destination.
   * Note: this version uses a comparator.
   *
   * @param <T> the item type
   * @param keySrc1 the first source array
   * @param arrStart1 starting offset of the first run
   * @param keySrc2 the second source array
   * @param arrStart2 starting offset of the second run
   * @param keyDst the destination array
   * @param arrStart3 starting offset of the destination
   * @param k the length of each source run
   * @param comparator the item comparator
   */
  private static <T> void mergeTwoSizeKBuffers(
      final T[] keySrc1, final int arrStart1,
      final T[] keySrc2, final int arrStart2,
      final T[] keyDst, final int arrStart3,
      final int k, final Comparator<? super T> comparator) {
    final int arrStop1 = arrStart1 + k;
    final int arrStop2 = arrStart2 + k;
    int i1 = arrStart1;
    int i2 = arrStart2;
    int i3 = arrStart3;
    while (i1 < arrStop1 && i2 < arrStop2) {
      if (comparator.compare(keySrc2[i2], keySrc1[i1]) < 0) {
        keyDst[i3++] = keySrc2[i2++];
      } else {
        keyDst[i3++] = keySrc1[i1++];
      }
    }
    if (i1 < arrStop1) {
      System.arraycopy(keySrc1, i1, keyDst, i3, arrStop1 - i1);
    } else {
      assert i2 < arrStop2;
      // FIX: the remaining items are in keySrc2 (the original copied from keySrc1 at index i2,
      // which was only accidentally correct because both callers pass the same backing array)
      System.arraycopy(keySrc2, i2, keyDst, i3, arrStop2 - i2);
    }
  }
}
| 2,801 |
0 | Create_ds/datasketches-java/src/main/java/org/apache/datasketches | Create_ds/datasketches-java/src/main/java/org/apache/datasketches/quantiles/DirectUpdateDoublesSketchR.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.quantiles;
import static org.apache.datasketches.quantiles.PreambleUtil.COMBINED_BUFFER;
import static org.apache.datasketches.quantiles.PreambleUtil.COMPACT_FLAG_MASK;
import static org.apache.datasketches.quantiles.PreambleUtil.EMPTY_FLAG_MASK;
import static org.apache.datasketches.quantiles.PreambleUtil.MAX_DOUBLE;
import static org.apache.datasketches.quantiles.PreambleUtil.MIN_DOUBLE;
import static org.apache.datasketches.quantiles.PreambleUtil.N_LONG;
import static org.apache.datasketches.quantiles.PreambleUtil.ORDERED_FLAG_MASK;
import static org.apache.datasketches.quantiles.PreambleUtil.READ_ONLY_FLAG_MASK;
import static org.apache.datasketches.quantiles.PreambleUtil.extractFamilyID;
import static org.apache.datasketches.quantiles.PreambleUtil.extractFlags;
import static org.apache.datasketches.quantiles.PreambleUtil.extractK;
import static org.apache.datasketches.quantiles.PreambleUtil.extractN;
import static org.apache.datasketches.quantiles.PreambleUtil.extractPreLongs;
import static org.apache.datasketches.quantiles.PreambleUtil.extractSerVer;
import org.apache.datasketches.common.SketchesArgumentException;
import org.apache.datasketches.common.SketchesReadOnlyException;
import org.apache.datasketches.memory.Memory;
import org.apache.datasketches.memory.WritableMemory;
import org.apache.datasketches.quantilescommon.QuantilesAPI;
/**
 * Implements the DoublesSketch off-heap in read-only form.
 * All mutating calls throw SketchesReadOnlyException.
 *
 * @author Kevin Lang
 * @author Lee Rhodes
 *
 */
class DirectUpdateDoublesSketchR extends UpdateDoublesSketch {
  static final int MIN_DIRECT_DOUBLES_SER_VER = 3;

  // the wrapped Memory image; never written through by this read-only class
  WritableMemory mem_;

  //**CONSTRUCTORS**********************************************************
  DirectUpdateDoublesSketchR(final int k) {
    super(k); //Checks k
  }

  /**
   * Wrap this sketch around the given non-compact Memory image of a DoublesSketch.
   *
   * @param srcMem the given non-compact Memory image of a DoublesSketch that may have data
   * @return a sketch that wraps the given srcMem
   */
  static DirectUpdateDoublesSketchR wrapInstance(final Memory srcMem) {
    final long memCap = srcMem.getCapacity();
    final int preLongs = extractPreLongs(srcMem);
    final int serVer = extractSerVer(srcMem);
    final int familyID = extractFamilyID(srcMem);
    final int flags = extractFlags(srcMem);
    final int k = extractK(srcMem);
    final boolean empty = (flags & EMPTY_FLAG_MASK) > 0; //Preamble flags empty state
    final long n = empty ? 0 : extractN(srcMem);
    //VALIDITY CHECKS
    checkPreLongs(preLongs);
    ClassicUtil.checkFamilyID(familyID);
    DoublesUtil.checkDoublesSerVer(serVer, MIN_DIRECT_DOUBLES_SER_VER);
    checkDirectFlags(flags); //Cannot be compact
    ClassicUtil.checkK(k);
    checkCompact(serVer, flags);
    checkDirectMemCapacity(k, n, memCap);
    checkEmptyAndN(empty, n);
    final DirectUpdateDoublesSketchR dds = new DirectUpdateDoublesSketchR(k);
    // cast is safe: this read-only class never writes through mem_
    dds.mem_ = (WritableMemory) srcMem;
    return dds;
  }

  @Override
  public double getMaxItem() {
    if (isEmpty()) { throw new IllegalArgumentException(QuantilesAPI.EMPTY_MSG); }
    return mem_.getDouble(MAX_DOUBLE);
  }

  @Override
  public double getMinItem() {
    if (isEmpty()) { throw new IllegalArgumentException(QuantilesAPI.EMPTY_MSG); }
    return mem_.getDouble(MIN_DOUBLE);
  }

  @Override
  public long getN() {
    // a preamble-only image (no combined buffer) implies an empty sketch
    return (mem_.getCapacity() < COMBINED_BUFFER) ? 0 : mem_.getLong(N_LONG);
  }

  @Override
  public boolean hasMemory() {
    return (mem_ != null);
  }

  @Override
  public boolean isDirect() {
    return (mem_ != null) ? mem_.isDirect() : false;
  }

  @Override
  public boolean isReadOnly() {
    return true;
  }

  @Override
  public boolean isSameResource(final Memory that) {
    return mem_.isSameResource(that);
  }

  @Override
  public void reset() {
    throw new SketchesReadOnlyException("Call to reset() on read-only buffer");
  }

  @Override
  public void update(final double dataItem) {
    throw new SketchesReadOnlyException("Call to update() on read-only buffer");
  }

  //Restricted overrides
  //Gets

  @Override
  int getBaseBufferCount() {
    return ClassicUtil.computeBaseBufferItems(getK(), getN());
  }

  @Override
  int getCombinedBufferItemCapacity() {
    return ((int)mem_.getCapacity() - COMBINED_BUFFER) / 8; // bytes to doubles
  }

  @Override
  double[] getCombinedBuffer() {
    final int k = getK();
    if (isEmpty()) { return new double[k << 1]; } //2K
    final long n = getN();
    final int itemCap = ClassicUtil.computeCombinedBufferItemCapacity(k, n);
    final double[] combinedBuffer = new double[itemCap];
    mem_.getDoubleArray(COMBINED_BUFFER, combinedBuffer, 0, itemCap);
    return combinedBuffer;
  }

  @Override
  long getBitPattern() {
    final int k = getK();
    final long n = getN();
    return ClassicUtil.computeBitPattern(k, n);
  }

  @Override
  WritableMemory getMemory() {
    return mem_;
  }

  //Puts (all illegal on this read-only class)

  @Override
  void putMinItem(final double minQuantile) {
    // FIX: message previously named the nonexistent method putMinQuantile()
    throw new SketchesReadOnlyException("Call to putMinItem() on read-only buffer");
  }

  @Override
  void putMaxItem(final double maxQuantile) {
    // FIX: message previously named the nonexistent method putMaxQuantile()
    throw new SketchesReadOnlyException("Call to putMaxItem() on read-only buffer");
  }

  @Override
  void putN(final long n) {
    throw new SketchesReadOnlyException("Call to putN() on read-only buffer");
  }

  @Override
  void putCombinedBuffer(final double[] combinedBuffer) {
    throw new SketchesReadOnlyException("Call to putCombinedBuffer() on read-only buffer");
  }

  @Override
  void putBaseBufferCount(final int baseBufferCount) {
    throw new SketchesReadOnlyException("Call to putBaseBufferCount() on read-only buffer");
  }

  @Override
  void putBitPattern(final long bitPattern) {
    // FIX: message previously said putBaseBufferCount() (copy-paste error)
    throw new SketchesReadOnlyException("Call to putBitPattern() on read-only buffer");
  }

  @Override
  double[] growCombinedBuffer(final int curCombBufItemCap, final int itemSpaceNeeded) {
    throw new SketchesReadOnlyException("Call to growCombinedBuffer() on read-only buffer");
  }

  //Checks

  /**
   * Checks the validity of the direct memory capacity assuming n, k.
   * @param k the given k
   * @param n the given n
   * @param memCapBytes the current memory capacity in bytes
   */
  static void checkDirectMemCapacity(final int k, final long n, final long memCapBytes) {
    final int reqBufBytes = getUpdatableStorageBytes(k, n);
    if (memCapBytes < reqBufBytes) {
      throw new SketchesArgumentException("Possible corruption: Memory capacity too small: "
          + memCapBytes + " < " + reqBufBytes);
    }
  }

  /**
   * Rejects compact images: SerVer 2 is always compact, later versions flag it.
   * @param serVer the serialization version
   * @param flags the preamble flags field
   */
  static void checkCompact(final int serVer, final int flags) {
    final boolean compact = (serVer == 2) | ((flags & COMPACT_FLAG_MASK) > 0);
    if (compact) {
      throw new SketchesArgumentException("Compact Memory is not supported for Wrap Instance.");
    }
  }

  /**
   * Validates the preamble-longs field.
   * @param preLongs the preamble size in longs
   */
  static void checkPreLongs(final int preLongs) {
    if ((preLongs < 1) || (preLongs > 2)) {
      throw new SketchesArgumentException(
          "Possible corruption: PreLongs must be 1 or 2: " + preLongs);
    }
  }

  /**
   * Validates that only non-compact flags are set.
   * @param flags the preamble flags field
   */
  static void checkDirectFlags(final int flags) {
    final int allowedFlags = //Cannot be compact!
        READ_ONLY_FLAG_MASK | EMPTY_FLAG_MASK | ORDERED_FLAG_MASK;
    final int flagsMask = ~allowedFlags;
    if ((flags & flagsMask) > 0) {
      throw new SketchesArgumentException(
          "Possible corruption: Invalid flags field: Cannot be compact! "
              + Integer.toBinaryString(flags));
    }
  }

  /**
   * Validates consistency of the empty flag with n.
   * @param empty the preamble empty flag
   * @param n the item count from the preamble
   */
  static void checkEmptyAndN(final boolean empty, final long n) {
    if (empty && (n > 0)) {
      throw new SketchesArgumentException(
          "Possible corruption: Empty Flag = true and N > 0: " + n);
    }
  }
}
| 2,802 |
0 | Create_ds/datasketches-java/src/main/java/org/apache/datasketches | Create_ds/datasketches-java/src/main/java/org/apache/datasketches/quantiles/HeapCompactDoublesSketch.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.quantiles;
import static org.apache.datasketches.quantiles.ClassicUtil.computeBaseBufferItems;
import static org.apache.datasketches.quantiles.ClassicUtil.computeBitPattern;
import static org.apache.datasketches.quantiles.ClassicUtil.computeRetainedItems;
import static org.apache.datasketches.quantiles.PreambleUtil.COMPACT_FLAG_MASK;
import static org.apache.datasketches.quantiles.PreambleUtil.EMPTY_FLAG_MASK;
import static org.apache.datasketches.quantiles.PreambleUtil.MAX_DOUBLE;
import static org.apache.datasketches.quantiles.PreambleUtil.MIN_DOUBLE;
import static org.apache.datasketches.quantiles.PreambleUtil.READ_ONLY_FLAG_MASK;
import static org.apache.datasketches.quantiles.PreambleUtil.extractFamilyID;
import static org.apache.datasketches.quantiles.PreambleUtil.extractFlags;
import static org.apache.datasketches.quantiles.PreambleUtil.extractK;
import static org.apache.datasketches.quantiles.PreambleUtil.extractN;
import static org.apache.datasketches.quantiles.PreambleUtil.extractPreLongs;
import static org.apache.datasketches.quantiles.PreambleUtil.extractSerVer;
import java.util.Arrays;
import org.apache.datasketches.common.SketchesArgumentException;
import org.apache.datasketches.memory.Memory;
import org.apache.datasketches.memory.WritableMemory;
import org.apache.datasketches.quantilescommon.QuantilesAPI;
/**
 * Implements the compact (read-only) DoublesSketch on the Java heap.
 *
 * @author Lee Rhodes
 * @author Jon Malkin
 */
final class HeapCompactDoublesSketch extends CompactDoublesSketch {
  static final int MIN_HEAP_DOUBLES_SER_VER = 1;

  /**
   * The smallest item ever seen in the stream.
   */
  private double minItem_;

  /**
   * The largest item ever seen in the stream.
   */
  private double maxItem_;

  /**
   * The total count of items seen.
   */
  private long n_;

  /**
   * Number of items currently in base buffer.
   *
   * <p>Count = N % (2*K)
   */
  private int baseBufferCount_;

  /**
   * Active levels expressed as a bit pattern.
   *
   * <p>Pattern = N / (2 * K)
   */
  private long bitPattern_;

  /**
   * This single array contains the base buffer plus all used levels.
   * A level is of size K and is either full and sorted.
   * Whether a level buffer is present is indicated by the bitPattern_.
   * The base buffer is sorted and has max length 2*K but uses only baseBufferCount_ items.
   * The base buffer precedes the level buffers. This buffer does not include the min, max items.
   *
   * <p>The levels arrays require quite a bit of explanation, which we defer until later.</p>
   */
  private double[] combinedBuffer_;

  //**CONSTRUCTORS**********************************************************
  private HeapCompactDoublesSketch(final int k) {
    super(k); //Checks k
  }

  /**
   * Converts the given UpdateDoublesSketch to this compact form.
   *
   * @param sketch the sketch to convert
   * @return a HeapCompactDoublesSketch created from an UpdateDoublesSketch
   */
  static HeapCompactDoublesSketch createFromUpdateSketch(final UpdateDoublesSketch sketch) {
    final int k = sketch.getK();
    final long n = sketch.getN();
    final HeapCompactDoublesSketch hcds = new HeapCompactDoublesSketch(k); // checks k
    hcds.n_ = n;
    hcds.bitPattern_ = computeBitPattern(k, n);
    assert hcds.bitPattern_ == sketch.getBitPattern();
    // an empty sketch has no extremes; NaN marks that state
    hcds.minItem_ = sketch.isEmpty() ? Double.NaN : sketch.getMinItem();
    hcds.maxItem_ = sketch.isEmpty() ? Double.NaN : sketch.getMaxItem();
    hcds.baseBufferCount_ = computeBaseBufferItems(k, n);
    assert hcds.baseBufferCount_ == sketch.getBaseBufferCount();
    // the compact buffer holds only the retained items, with no unused capacity
    final int retainedItems = computeRetainedItems(k, n);
    final double[] combinedBuffer = new double[retainedItems];
    final DoublesSketchAccessor accessor = DoublesSketchAccessor.wrap(sketch);
    assert hcds.baseBufferCount_ == accessor.numItems();
    // copy and sort base buffer
    System.arraycopy(accessor.getArray(0, hcds.baseBufferCount_), 0,
        combinedBuffer, 0,
        hcds.baseBufferCount_);
    Arrays.sort(combinedBuffer, 0, hcds.baseBufferCount_);
    // append each populated level, walking the bit pattern from the lowest level upward
    int combinedBufferOffset = hcds.baseBufferCount_;
    long bitPattern = hcds.bitPattern_;
    for (int lvl = 0; bitPattern > 0; ++lvl, bitPattern >>>= 1) {
      if ((bitPattern & 1L) > 0L) {
        accessor.setLevel(lvl);
        System.arraycopy(accessor.getArray(0, k), 0,
            combinedBuffer, combinedBufferOffset, k);
        combinedBufferOffset += k;
      }
    }
    hcds.combinedBuffer_ = combinedBuffer;
    return hcds;
  }

  /**
   * Heapifies the given srcMem, which must be a Memory image of a DoublesSketch and may have data.
   *
   * @param srcMem a Memory image of a sketch, which may be in compact or not compact form.
   * <a href="{@docRoot}/resources/dictionary.html#mem">See Memory</a>
   * @return a DoublesSketch on the Java heap.
   */
  static HeapCompactDoublesSketch heapifyInstance(final Memory srcMem) {
    final long memCapBytes = srcMem.getCapacity();
    if (memCapBytes < Long.BYTES) {
      throw new SketchesArgumentException("Source Memory too small: " + memCapBytes + " < 8");
    }
    final int preLongs = extractPreLongs(srcMem);
    final int serVer = extractSerVer(srcMem);
    final int familyID = extractFamilyID(srcMem);
    final int flags = extractFlags(srcMem);
    final int k = extractK(srcMem);
    final boolean empty = (flags & EMPTY_FLAG_MASK) > 0; //Preamble flags empty state
    final long n = empty ? 0 : extractN(srcMem);
    //VALIDITY CHECKS
    DoublesUtil.checkDoublesSerVer(serVer, MIN_HEAP_DOUBLES_SER_VER);
    ClassicUtil.checkHeapFlags(flags);
    HeapUpdateDoublesSketch.checkPreLongsFlagsSerVer(flags, serVer, preLongs);
    ClassicUtil.checkFamilyID(familyID);
    final HeapCompactDoublesSketch hds = new HeapCompactDoublesSketch(k); //checks k
    if (empty) {
      // empty sketch: no buffer, NaN extremes
      hds.n_ = 0;
      hds.combinedBuffer_ = null;
      hds.baseBufferCount_ = 0;
      hds.bitPattern_ = 0;
      hds.minItem_ = Double.NaN;
      hds.maxItem_ = Double.NaN;
      return hds;
    }
    //Not empty, must have valid preamble + min, max, n.
    //Forward compatibility from SerVer = 1 :
    final boolean srcIsCompact = (serVer == 2) | ((flags & (COMPACT_FLAG_MASK | READ_ONLY_FLAG_MASK)) > 0);
    HeapUpdateDoublesSketch.checkHeapMemCapacity(k, n, srcIsCompact, serVer, memCapBytes);
    //set class members by computing them
    hds.n_ = n;
    hds.baseBufferCount_ = computeBaseBufferItems(k, n);
    hds.bitPattern_ = computeBitPattern(k, n);
    hds.minItem_ = srcMem.getDouble(MIN_DOUBLE);
    hds.maxItem_ = srcMem.getDouble(MAX_DOUBLE);
    final int totItems = ClassicUtil.computeRetainedItems(k, n);
    hds.srcMemoryToCombinedBuffer(srcMem, serVer, srcIsCompact, totItems);
    return hds;
  }

  @Override
  public long getN() {
    return n_;
  }

  @Override
  public boolean hasMemory() {
    return false; // always on-heap
  }

  @Override
  public boolean isDirect() {
    return false; // always on-heap
  }

  @Override
  public double getMinItem() {
    if (isEmpty()) { throw new IllegalArgumentException(QuantilesAPI.EMPTY_MSG); }
    return minItem_;
  }

  @Override
  public double getMaxItem() {
    if (isEmpty()) { throw new IllegalArgumentException(QuantilesAPI.EMPTY_MSG); }
    return maxItem_;
  }

  /**
   * Loads the Combined Buffer from the given source Memory.
   * The resulting Combined Buffer is allocated in this method and is always in compact form.
   * @param srcMem the given source Memory
   * @param serVer the serialization version of the source
   * @param srcIsCompact true if the given source Memory is in compact form
   * @param combBufCap total items for the combined buffer (size in doubles)
   */
  private void srcMemoryToCombinedBuffer(final Memory srcMem, final int serVer,
      final boolean srcIsCompact, final int combBufCap) {
    final int preLongs = 2;
    final int extra = (serVer == 1) ? 3 : 2; // space for min and max quantiles, buf alloc (SerVer 1)
    final int preBytes = (preLongs + extra) << 3;
    final int k = getK();
    combinedBuffer_ = new double[combBufCap];
    if (srcIsCompact) {
      // just load the array, sort base buffer if serVer 2
      srcMem.getDoubleArray(preBytes, combinedBuffer_, 0, combBufCap);
      if (serVer == 2) {
        Arrays.sort(combinedBuffer_, 0, baseBufferCount_);
      }
    } else {
      // non-compact source
      // load base buffer and ensure it's sorted
      srcMem.getDoubleArray(preBytes, combinedBuffer_, 0, baseBufferCount_);
      Arrays.sort(combinedBuffer_, 0, baseBufferCount_);
      // iterate through levels: source always reserves 2K items for the base buffer,
      // and one size-K slot per level; copy only the levels flagged in bitPattern_
      int srcOffset = preBytes + ((2 * k) << 3);
      int dstOffset = baseBufferCount_;
      long bitPattern = bitPattern_;
      for (; bitPattern != 0; srcOffset += (k << 3), bitPattern >>>= 1) {
        if ((bitPattern & 1L) > 0L) {
          srcMem.getDoubleArray(srcOffset, combinedBuffer_, dstOffset, k);
          dstOffset += k;
        }
      }
    }
  }

  //Restricted overrides
  //Gets

  @Override
  int getBaseBufferCount() {
    return baseBufferCount_;
  }

  @Override
  int getCombinedBufferItemCapacity() {
    // compact form: capacity equals the retained item count
    return combinedBuffer_.length;
  }

  @Override
  double[] getCombinedBuffer() {
    return combinedBuffer_;
  }

  @Override
  long getBitPattern() {
    return bitPattern_;
  }

  @Override
  WritableMemory getMemory() {
    return null; // on-heap sketch has no backing Memory
  }
}
| 2,803 |
0 | Create_ds/datasketches-java/src/main/java/org/apache/datasketches | Create_ds/datasketches-java/src/main/java/org/apache/datasketches/quantiles/ItemsSketchIterator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.quantiles;
import java.util.Objects;
import org.apache.datasketches.common.SketchesStateException;
import org.apache.datasketches.quantilescommon.QuantilesGenericSketchIterator;
/**
 * Iterator over ItemsSketch. The order is not defined.
 * @param <T> type of item
 */
public final class ItemsSketchIterator<T> implements QuantilesGenericSketchIterator<T> {
  private Object[] buffer_;   // the sketch's combined buffer (base buffer + levels)
  private long bits_;         // remaining bit pattern of populated levels
  private int level_;         // current level; -1 designates the base buffer
  private long weight_;       // weight of items at the current level
  private int index_;         // position within the current buffer; -1 before first next()
  private int offset_;        // start of the current buffer within buffer_
  private int count_;         // number of valid items in the current buffer
  private int k_;             // sketch parameter k

  ItemsSketchIterator(final ItemsSketch<T> sketch, final long bitPattern) {
    Objects.requireNonNull(sketch, "sketch must not be null");
    buffer_ = sketch.combinedBuffer_;
    count_ = sketch.getBaseBufferCount();
    k_ = sketch.getK();
    bits_ = bitPattern;
    level_ = -1;
    weight_ = 1;
    index_ = -1;
    offset_ = 0;
  }

  @Override
  @SuppressWarnings("unchecked")
  public T getQuantile() {
    if (index_ < 0) {
      throw new SketchesStateException("index < 0; getQuantile() was called before next()");
    }
    return (T) buffer_[offset_ + index_];
  }

  @Override
  public long getWeight() {
    return weight_;
  }

  @Override
  public boolean next() {
    if (++index_ < count_) {
      return true; // still inside the current buffer
    }
    // advance to the next populated level, doubling the weight per level climbed
    while (true) {
      level_++;
      if (level_ > 0) {
        bits_ >>>= 1;
      }
      if (bits_ == 0L) {
        return false; // ran out of levels
      }
      weight_ *= 2;
      if ((bits_ & 1L) != 0L) {
        break; // found a populated level
      }
    }
    index_ = 0;
    offset_ = (2 + level_) * k_;
    count_ = k_;
    return true;
  }
}
| 2,804 |
0 | Create_ds/datasketches-java/src/main/java/org/apache/datasketches | Create_ds/datasketches-java/src/main/java/org/apache/datasketches/quantiles/UpdateDoublesSketch.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.quantiles;
import org.apache.datasketches.memory.Memory;
import org.apache.datasketches.memory.WritableMemory;
/**
 * An updatable (read-write) DoublesSketch.
 *
 * @author Jon Malkin
 */
public abstract class UpdateDoublesSketch extends DoublesSketch {

  UpdateDoublesSketch(final int k) {
    super(k); // validates k
  }

  /**
   * Wrap this sketch around the given non-compact Memory image of a DoublesSketch.
   *
   * @param srcMem the given Memory image of a DoublesSketch that may have data,
   * @return a sketch that wraps the given srcMem
   */
  public static UpdateDoublesSketch wrap(final WritableMemory srcMem) {
    return DirectUpdateDoublesSketch.wrapInstance(srcMem);
  }

  /**
   * Updates this sketch with the given double data item
   *
   * @param item an item from a stream of items. NaNs are ignored.
   */
  @Override
  public abstract void update(double item);

  /**
   * Heapifies the given Memory image of a DoublesSketch into an on-heap, updatable sketch.
   *
   * @param srcMem a Memory image of a DoublesSketch
   * @return an on-heap UpdateDoublesSketch
   */
  public static UpdateDoublesSketch heapify(final Memory srcMem) {
    return HeapUpdateDoublesSketch.heapifyInstance(srcMem);
  }

  /**
   * @return a CompactDoublesSketch of this class
   */
  public CompactDoublesSketch compact() {
    return compact(null); // null destination selects the on-heap compact form
  }

  /**
   * Returns a compact version of this sketch. If passing in a Memory object, the compact sketch
   * will use that direct memory; otherwise, an on-heap sketch will be returned.
   * @param dstMem An optional target memory to hold the sketch.
   * @return A compact version of this sketch
   */
  public CompactDoublesSketch compact(final WritableMemory dstMem) {
    if (dstMem == null) {
      return HeapCompactDoublesSketch.createFromUpdateSketch(this);
    }
    return DirectCompactDoublesSketch.createFromUpdateSketch(this, dstMem);
  }

  @Override
  boolean isCompact() {
    // an updatable sketch is by definition never compact
    return false;
  }

  //Puts

  /**
   * Puts the minimum item
   *
   * @param minItem the given minimum item
   */
  abstract void putMinItem(double minItem);

  /**
   * Puts the max item
   *
   * @param maxItem the given maximum item
   */
  abstract void putMaxItem(double maxItem);

  /**
   * Puts the long <i>n</i>
   *
   * @param n the given long <i>n</i>
   */
  abstract void putN(long n);

  /**
   * Puts the combined, non-compact buffer.
   *
   * @param combinedBuffer the combined buffer array
   */
  abstract void putCombinedBuffer(double[] combinedBuffer);

  /**
   * Puts the base buffer count
   *
   * @param baseBufCount the given base buffer count
   */
  abstract void putBaseBufferCount(int baseBufCount);

  /**
   * Puts the bit pattern
   *
   * @param bitPattern the given bit pattern
   */
  abstract void putBitPattern(long bitPattern);

  /**
   * Grows the combined buffer to the given spaceNeeded
   *
   * @param currentSpace the current allocated space
   * @param spaceNeeded the space needed
   * @return the enlarged combined buffer with data from the original combined buffer.
   */
  abstract double[] growCombinedBuffer(int currentSpace, int spaceNeeded);
}
| 2,805 |
0 | Create_ds/datasketches-java/src/main/java/org/apache/datasketches | Create_ds/datasketches-java/src/main/java/org/apache/datasketches/quantiles/DoublesUtil.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.quantiles;
import static org.apache.datasketches.common.Util.LS;
import static org.apache.datasketches.quantiles.ClassicUtil.DOUBLES_SER_VER;
import static org.apache.datasketches.quantiles.ClassicUtil.computeCombinedBufferItemCapacity;
import static org.apache.datasketches.quantiles.ClassicUtil.computeNumLevelsNeeded;
import static org.apache.datasketches.quantiles.ClassicUtil.computeTotalLevels;
import static org.apache.datasketches.quantiles.ClassicUtil.computeValidLevels;
import static org.apache.datasketches.quantiles.ClassicUtil.getNormalizedRankError;
import java.util.Arrays;
import org.apache.datasketches.common.SketchesArgumentException;
import org.apache.datasketches.memory.Memory;
/**
* Utilities that support the doubles quantiles algorithms.
*
* <p>This class contains a highly specialized sort called blockyTandemMergeSort().
* It also contains methods that are used while building histograms and other common
* functions.</p>
*
* @author Lee Rhodes
*/
final class DoublesUtil {

  // Utility class: no instances.
  private DoublesUtil() {}

  /**
   * Returns an on-heap, updatable copy of the given sketch.
   *
   * @param sketch the given sketch, compact or non-compact, heap or direct
   * @return a copy of the given sketch
   */
  static HeapUpdateDoublesSketch copyToHeap(final DoublesSketch sketch) {
    final HeapUpdateDoublesSketch qsCopy;
    qsCopy = HeapUpdateDoublesSketch.newInstance(sketch.getK());
    qsCopy.putN(sketch.getN());
    // NaN marks min/max as undefined when the source sketch is empty
    qsCopy.putMinItem(sketch.isEmpty() ? Double.NaN : sketch.getMinItem());
    qsCopy.putMaxItem(sketch.isEmpty() ? Double.NaN : sketch.getMaxItem());
    qsCopy.putBaseBufferCount(sketch.getBaseBufferCount());
    qsCopy.putBitPattern(sketch.getBitPattern());
    if (sketch.isCompact()) {
      // A compact source cannot simply hand over its combined buffer:
      // allocate a full-capacity buffer, then copy the base buffer and each
      // valid level through the accessors.
      final int combBufItems = computeCombinedBufferItemCapacity(sketch.getK(), sketch.getN());
      final double[] combBuf = new double[combBufItems];
      qsCopy.putCombinedBuffer(combBuf);
      final DoublesSketchAccessor sketchAccessor = DoublesSketchAccessor.wrap(sketch);
      final DoublesSketchAccessor copyAccessor = DoublesSketchAccessor.wrap(qsCopy);
      // start with BB
      copyAccessor.putArray(sketchAccessor.getArray(0, sketchAccessor.numItems()),
          0, 0, sketchAccessor.numItems());
      // each set bit in bitPattern marks a valid level to copy
      long bitPattern = sketch.getBitPattern();
      for (int lvl = 0; bitPattern != 0L; ++lvl, bitPattern >>>= 1) {
        if ((bitPattern & 1L) > 0L) {
          sketchAccessor.setLevel(lvl);
          copyAccessor.setLevel(lvl);
          copyAccessor.putArray(sketchAccessor.getArray(0, sketchAccessor.numItems()),
              0, 0, sketchAccessor.numItems());
        }
      }
    } else {
      // Non-compact source: layouts match, so a straight array copy suffices.
      final double[] combBuf = sketch.getCombinedBuffer();
      qsCopy.putCombinedBuffer(Arrays.copyOf(combBuf, combBuf.length));
    }
    return qsCopy;
  }

  /**
   * Check the validity of the given serialization version.
   *
   * @param serVer the given serialization version
   * @param minSupportedSerVer the oldest serialization version supported
   * @throws SketchesArgumentException if serVer is outside
   * [minSupportedSerVer, DOUBLES_SER_VER]
   */
  static void checkDoublesSerVer(final int serVer, final int minSupportedSerVer) {
    final int max = DOUBLES_SER_VER;
    if ((serVer > max) || (serVer < minSupportedSerVer)) {
      throw new SketchesArgumentException(
          "Possible corruption: Unsupported Serialization Version: " + serVer);
    }
  }

  /**
   * Builds a human-readable string for the given sketch.
   *
   * @param sketchSummary if true, include the summary section
   * @param dataDetail if true, include the per-level data detail section
   * @param sketch the sketch to describe
   * @return the requested sections, detail first, then summary
   */
  static String toString(final boolean sketchSummary, final boolean dataDetail,
      final DoublesSketch sketch) {
    final StringBuilder sb = new StringBuilder();
    if (dataDetail) {
      sb.append(getDataDetail(sketch));
    }
    if (sketchSummary) {
      sb.append(getSummary(sketch));
    }
    return sb.toString();
  }

  /**
   * Builds a human-readable string for a sketch image in Memory by first
   * heapifying it.
   *
   * @param sketchSummary if true, include the summary section
   * @param dataDetail if true, include the per-level data detail section
   * @param mem a Memory image of a DoublesSketch
   * @return the requested sections as a string
   */
  static String memToString(final boolean sketchSummary, final boolean dataDetail,
      final Memory mem) {
    final DoublesSketch ds = DoublesSketch.heapify(mem);
    return ds.toString(sketchSummary, dataDetail);
  }

  // Formats the summary section: configuration, sizes, level structure and
  // rank-error estimates of the given sketch.
  private static String getSummary(final DoublesSketch sk) {
    final StringBuilder sb = new StringBuilder();
    final String thisSimpleName = sk.getClass().getSimpleName();
    final int k = sk.getK();
    final String kStr = String.format("%,d", k);
    final long n = sk.getN();
    final String nStr = String.format("%,d", n);
    final String bbCntStr = String.format("%,d", sk.getBaseBufferCount());
    final String combBufCapStr = String.format("%,d", sk.getCombinedBufferItemCapacity());
    final long bitPattern = sk.getBitPattern();
    final int neededLevels = computeNumLevelsNeeded(k, n);
    final int totalLevels = computeTotalLevels(bitPattern);
    final int validLevels = computeValidLevels(bitPattern);
    final String retItemsStr = String.format("%,d", sk.getNumRetained());
    final String cmptBytesStr = String.format("%,d", sk.getCurrentCompactSerializedSizeBytes());
    final String updtBytesStr = String.format("%,d", sk.getCurrentUpdatableSerializedSizeBytes());
    // "double-sided" error used by getPMF()
    final double epsPmf = getNormalizedRankError(k, true);
    final String epsPmfPctStr = String.format("%.3f%%", epsPmf * 100.0);
    // "single-sided" error used by all other queries
    final double eps = getNormalizedRankError(k, false);
    final String epsPctStr = String.format("%.3f%%", eps * 100.0);
    final String memCap = sk.hasMemory() ? Long.toString(sk.getMemory().getCapacity()) : "";
    // NaN signals undefined min/max for an empty sketch
    final double minItem = sk.isEmpty() ? Double.NaN : sk.getMinItem();
    final double maxItem = sk.isEmpty() ? Double.NaN : sk.getMaxItem();
    sb.append(LS).append("### Quantiles ").append(thisSimpleName).append(" SUMMARY: ")
        .append(LS);
    sb.append(" Empty : ").append(sk.isEmpty()).append(LS);
    sb.append(" Memory, Capacity bytes : ").append(sk.hasMemory())
        .append(", ").append(memCap).append(LS);
    sb.append(" Estimation Mode : ").append(sk.isEstimationMode()).append(LS);
    sb.append(" K : ").append(kStr).append(LS);
    sb.append(" N : ").append(nStr).append(LS);
    sb.append(" Levels (Needed, Total, Valid): ")
        .append(neededLevels + ", " + totalLevels + ", " + validLevels).append(LS);
    sb.append(" Level Bit Pattern : ")
        .append(Long.toBinaryString(bitPattern)).append(LS);
    sb.append(" BaseBufferCount : ").append(bbCntStr).append(LS);
    sb.append(" Combined Buffer Capacity : ").append(combBufCapStr).append(LS);
    sb.append(" Retained Items : ").append(retItemsStr).append(LS);
    sb.append(" Compact Storage Bytes : ").append(cmptBytesStr).append(LS);
    sb.append(" Updatable Storage Bytes : ").append(updtBytesStr).append(LS);
    sb.append(" Normalized Rank Error : ").append(epsPctStr).append(LS);
    sb.append(" Normalized Rank Error (PMF) : ").append(epsPmfPctStr).append(LS);
    sb.append(" Min Item : ")
        .append(String.format("%12.6e", minItem)).append(LS);
    sb.append(" Max Item : ")
        .append(String.format("%12.6e", maxItem)).append(LS);
    sb.append("### END SKETCH SUMMARY").append(LS);
    return sb.toString();
  }

  // Formats the data-detail section: the base buffer contents followed by one
  // row per level, each flagged T/F for validity against the bit pattern.
  private static String getDataDetail(final DoublesSketch sketchIn) {
    // A compact sketch lacks the full combined-buffer layout; copy to heap first.
    final DoublesSketch sketch = sketchIn.isCompact() ? copyToHeap(sketchIn) : sketchIn;
    final StringBuilder sb = new StringBuilder();
    final String skName = sketch.getClass().getSimpleName();
    sb.append(LS).append("### Quantiles ").append(skName).append(" DATA DETAIL: ").append(LS);
    final int k = sketch.getK();
    final long n = sketch.getN();
    final int bbCount = sketch.getBaseBufferCount();
    final long bitPattern = sketch.getBitPattern();
    final double[] combBuf = sketch.getCombinedBuffer();
    //output the base buffer
    sb.append(" BaseBuffer : ");
    for (int i = 0; i < bbCount; i++) {
      sb.append(String.format("%10.1f", combBuf[i]));
    }
    sb.append(LS);
    //output all the levels
    final int combBufSize = combBuf.length;
    if (n >= (2 * k)) { //levels exist only once the sketch is in estimation mode
      sb.append(" Valid | Level");
      for (int j = 2 * k; j < combBufSize; j++) { //output level data starting at 2K
        if ((j % k) == 0) { //start output of new level
          final int levelNum = (j / k) - 2;
          final String validLvl = ((1L << levelNum) & bitPattern) > 0 ? " T " : " F ";
          final String lvl = String.format("%5d", levelNum);
          sb.append(LS).append(" ").append(validLvl).append(" ").append(lvl).append(": ");
        }
        sb.append(String.format("%10.1f", combBuf[j]));
      }
      sb.append(LS);
    }
    sb.append("### END DATA DETAIL").append(LS);
    return sb.toString();
  }
}
| 2,806 |
0 | Create_ds/datasketches-java/src/main/java/org/apache/datasketches | Create_ds/datasketches-java/src/main/java/org/apache/datasketches/quantiles/DirectCompactDoublesSketch.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.quantiles;
import static org.apache.datasketches.quantiles.ClassicUtil.DOUBLES_SER_VER;
import static org.apache.datasketches.quantiles.ClassicUtil.checkFamilyID;
import static org.apache.datasketches.quantiles.ClassicUtil.checkK;
import static org.apache.datasketches.quantiles.ClassicUtil.computeBaseBufferItems;
import static org.apache.datasketches.quantiles.ClassicUtil.computeBitPattern;
import static org.apache.datasketches.quantiles.ClassicUtil.computeRetainedItems;
import static org.apache.datasketches.quantiles.PreambleUtil.COMBINED_BUFFER;
import static org.apache.datasketches.quantiles.PreambleUtil.COMPACT_FLAG_MASK;
import static org.apache.datasketches.quantiles.PreambleUtil.EMPTY_FLAG_MASK;
import static org.apache.datasketches.quantiles.PreambleUtil.MAX_DOUBLE;
import static org.apache.datasketches.quantiles.PreambleUtil.MIN_DOUBLE;
import static org.apache.datasketches.quantiles.PreambleUtil.N_LONG;
import static org.apache.datasketches.quantiles.PreambleUtil.ORDERED_FLAG_MASK;
import static org.apache.datasketches.quantiles.PreambleUtil.READ_ONLY_FLAG_MASK;
import static org.apache.datasketches.quantiles.PreambleUtil.extractFamilyID;
import static org.apache.datasketches.quantiles.PreambleUtil.extractFlags;
import static org.apache.datasketches.quantiles.PreambleUtil.extractK;
import static org.apache.datasketches.quantiles.PreambleUtil.extractN;
import static org.apache.datasketches.quantiles.PreambleUtil.extractPreLongs;
import static org.apache.datasketches.quantiles.PreambleUtil.extractSerVer;
import static org.apache.datasketches.quantiles.PreambleUtil.insertFamilyID;
import static org.apache.datasketches.quantiles.PreambleUtil.insertFlags;
import static org.apache.datasketches.quantiles.PreambleUtil.insertK;
import static org.apache.datasketches.quantiles.PreambleUtil.insertMaxDouble;
import static org.apache.datasketches.quantiles.PreambleUtil.insertMinDouble;
import static org.apache.datasketches.quantiles.PreambleUtil.insertN;
import static org.apache.datasketches.quantiles.PreambleUtil.insertPreLongs;
import static org.apache.datasketches.quantiles.PreambleUtil.insertSerVer;
import java.util.Arrays;
import org.apache.datasketches.common.Family;
import org.apache.datasketches.common.SketchesArgumentException;
import org.apache.datasketches.memory.Memory;
import org.apache.datasketches.memory.WritableMemory;
import org.apache.datasketches.quantilescommon.QuantilesAPI;
/**
* Implements the DoublesSketch off-heap.
*
* @author Kevin Lang
* @author Lee Rhodes
* @author Jon Malkin
*/
final class DirectCompactDoublesSketch extends CompactDoublesSketch {
  // Oldest serialization version this direct wrapper supports.
  private static final int MIN_DIRECT_DOUBLES_SER_VER = 3;

  // Backing Memory holding the compact, ordered sketch image.
  private WritableMemory mem_;

  //**CONSTRUCTORS**********************************************************
  private DirectCompactDoublesSketch(final int k) {
    super(k); //Checks k
  }

  /**
   * Converts the given UpdateDoublesSketch to this compact form.
   *
   * @param sketch the sketch to convert
   * @param dstMem the WritableMemory to use for the destination
   * @return a DirectCompactDoublesSketch created from an UpdateDoublesSketch
   */
  static DirectCompactDoublesSketch createFromUpdateSketch(final UpdateDoublesSketch sketch,
      final WritableMemory dstMem) {
    final long memCap = dstMem.getCapacity();
    final int k = sketch.getK();
    final long n = sketch.getN();
    checkDirectMemCapacity(k, n, memCap);
    //initialize dstMem
    dstMem.putLong(0, 0L); //clear pre0
    insertPreLongs(dstMem, 2);
    insertSerVer(dstMem, DOUBLES_SER_VER);
    insertFamilyID(dstMem, Family.QUANTILES.getID());
    insertK(dstMem, k);
    final int flags = COMPACT_FLAG_MASK | READ_ONLY_FLAG_MASK; // true for all compact sketches
    if (sketch.isEmpty()) {
      // empty: preamble only, no n/min/max or data
      insertFlags(dstMem, flags | EMPTY_FLAG_MASK);
    } else {
      insertFlags(dstMem, flags);
      insertN(dstMem, n);
      insertMinDouble(dstMem, sketch.getMinItem());
      insertMaxDouble(dstMem, sketch.getMaxItem());
      final int bbCount = computeBaseBufferItems(k, n);
      final DoublesSketchAccessor inputAccessor = DoublesSketchAccessor.wrap(sketch);
      assert bbCount == inputAccessor.numItems();
      long dstMemOffset = COMBINED_BUFFER;
      // copy and sort base buffer
      final double[] bbArray = inputAccessor.getArray(0, bbCount);
      Arrays.sort(bbArray);
      dstMem.putDoubleArray(dstMemOffset, bbArray, 0, bbCount);
      dstMemOffset += bbCount << 3; // 8 bytes per double
      // copy each valid level, lowest level first
      long bitPattern = computeBitPattern(k, n);
      for (int lvl = 0; bitPattern > 0; ++lvl, bitPattern >>>= 1) {
        if ((bitPattern & 1L) > 0L) {
          inputAccessor.setLevel(lvl);
          dstMem.putDoubleArray(dstMemOffset, inputAccessor.getArray(0, k), 0, k);
          dstMemOffset += k << 3;
        }
      }
    }
    final DirectCompactDoublesSketch dcds = new DirectCompactDoublesSketch(k);
    dcds.mem_ = dstMem;
    return dcds;
  }

  /**
   * Wrap this sketch around the given compact Memory image of a DoublesSketch.
   *
   * @param srcMem the given compact Memory image of a DoublesSketch that may have data,
   * @return a sketch that wraps the given srcMem
   */
  static DirectCompactDoublesSketch wrapInstance(final Memory srcMem) {
    final long memCap = srcMem.getCapacity();
    final int preLongs = extractPreLongs(srcMem);
    final int serVer = extractSerVer(srcMem);
    final int familyID = extractFamilyID(srcMem);
    final int flags = extractFlags(srcMem);
    final int k = extractK(srcMem);
    final boolean empty = (flags & EMPTY_FLAG_MASK) > 0;
    // an empty image may be too short to even hold the N field
    final long n = empty ? 0 : extractN(srcMem);
    //VALIDITY CHECKS
    DirectUpdateDoublesSketchR.checkPreLongs(preLongs);
    checkFamilyID(familyID);
    DoublesUtil.checkDoublesSerVer(serVer, MIN_DIRECT_DOUBLES_SER_VER);
    checkCompact(serVer, flags);
    checkK(k);
    checkDirectMemCapacity(k, n, memCap);
    DirectUpdateDoublesSketchR.checkEmptyAndN(empty, n);
    final DirectCompactDoublesSketch dds = new DirectCompactDoublesSketch(k);
    dds.mem_ = (WritableMemory) srcMem; //read-only by contract; never written through
    return dds;
  }

  @Override
  public double getMaxItem() {
    if (isEmpty()) { throw new IllegalArgumentException(QuantilesAPI.EMPTY_MSG); }
    return mem_.getDouble(MAX_DOUBLE);
  }

  @Override
  public double getMinItem() {
    if (isEmpty()) { throw new IllegalArgumentException(QuantilesAPI.EMPTY_MSG); }
    return mem_.getDouble(MIN_DOUBLE);
  }

  @Override
  public long getN() {
    // images shorter than the full preamble are empty and carry no N field
    return (mem_.getCapacity() < COMBINED_BUFFER) ? 0 : mem_.getLong(N_LONG);
  }

  @Override
  public boolean hasMemory() {
    return (mem_ != null);
  }

  @Override
  public boolean isDirect() {
    return (mem_ != null) ? mem_.isDirect() : false;
  }

  @Override
  public boolean isSameResource(final Memory that) {
    return mem_.isSameResource(that);
  }

  //Restricted overrides
  //Gets

  @Override
  int getBaseBufferCount() {
    // derived from k and n; not stored in the compact image
    return computeBaseBufferItems(getK(), getN());
  }

  @Override
  int getCombinedBufferItemCapacity() {
    return ((int)mem_.getCapacity() - COMBINED_BUFFER) / 8; // 8 bytes per double
  }

  @Override
  double[] getCombinedBuffer() {
    final int k = getK();
    if (isEmpty()) { return new double[k << 1]; } //2K
    final long n = getN();
    final int itemCap = computeRetainedItems(k, n);
    final double[] combinedBuffer = new double[itemCap];
    mem_.getDoubleArray(COMBINED_BUFFER, combinedBuffer, 0, itemCap);
    return combinedBuffer;
  }

  @Override
  long getBitPattern() {
    // derived from k and n; not stored in the compact image
    final int k = getK();
    final long n = getN();
    return computeBitPattern(k, n);
  }

  @Override
  WritableMemory getMemory() {
    return mem_;
  }

  //Checks

  /**
   * Checks the validity of the direct memory capacity assuming n, k.
   * @param k the given k
   * @param n the given n
   * @param memCapBytes the current memory capacity in bytes
   */
  static void checkDirectMemCapacity(final int k, final long n, final long memCapBytes) {
    final int reqBufBytes = getCompactSerialiedSizeBytes(k, n);
    if (memCapBytes < reqBufBytes) {
      throw new SketchesArgumentException("Possible corruption: Memory capacity too small: "
          + memCapBytes + " < " + reqBufBytes);
    }
  }

  /**
   * Checks a sketch's serial version and flags to see if the sketch can be wrapped as a
   * DirectCompactDoubleSketch. Throws an exception if the sketch is neither empty nor compact
   * and ordered, unless the sketch uses serialization version 2.
   * @param serVer the serialization version
   * @param flags Flags from the sketch to evaluate
   */
  static void checkCompact(final int serVer, final int flags) {
    final int compactFlagMask = COMPACT_FLAG_MASK | ORDERED_FLAG_MASK;
    if ((serVer != 2)
        && ((flags & EMPTY_FLAG_MASK) == 0)
        && ((flags & compactFlagMask) != compactFlagMask)) {
      throw new SketchesArgumentException(
          "Possible corruption: Must be v2, empty, or compact and ordered. Flags field: "
              + Integer.toBinaryString(flags) + ", SerVer: " + serVer);
    }
  }
}
| 2,807 |
0 | Create_ds/datasketches-java/src/main/java/org/apache/datasketches | Create_ds/datasketches-java/src/main/java/org/apache/datasketches/quantiles/DoublesSketchBuilder.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.quantiles;
import static org.apache.datasketches.quantiles.ClassicUtil.LS;
import static org.apache.datasketches.quantiles.ClassicUtil.TAB;
import org.apache.datasketches.memory.WritableMemory;
/**
* For building a new quantiles DoublesSketch.
*
* @author Lee Rhodes
*/
public class DoublesSketchBuilder {
  // The configured k; defaults to 128 (normalized rank error ~1.7%).
  private int bK = PreambleUtil.DEFAULT_K;

  /**
   * Constructor for a new DoublesSketchBuilder. The default configuration is
   * <ul>
   * <li>k: 128. This produces a normalized rank error of about 1.7%</li>
   * <li>Memory: null</li>
   * </ul>
   */
  public DoublesSketchBuilder() {}

  /**
   * Sets the parameter <i>k</i> that determines the accuracy and size of the sketch.
   * @param k determines the accuracy and size of the sketch.
   * It is recommended that <i>k</i> be a power of 2 to enable unioning of sketches with
   * different <i>k</i>. It is only possible to union from
   * larger <i>k</i> to smaller <i>k</i>.
   * @return this builder
   * @throws org.apache.datasketches.common.SketchesArgumentException if k is invalid
   */
  public DoublesSketchBuilder setK(final int k) {
    ClassicUtil.checkK(k); //fail fast on an invalid k
    bK = k;
    return this;
  }

  /**
   * Gets the current configured <i>k</i>
   * @return the current configured <i>k</i>
   */
  public int getK() {
    return bK;
  }

  /**
   * Returns an UpdateDoublesSketch with the current configuration of this Builder.
   * @return a UpdateDoublesSketch
   */
  public UpdateDoublesSketch build() {
    return HeapUpdateDoublesSketch.newInstance(bK);
  }

  /**
   * Returns a quantiles UpdateDoublesSketch with the current configuration of this builder
   * and the specified backing destination Memory store.
   * @param dstMem destination memory for use by the sketch
   * @return an UpdateDoublesSketch
   */
  public UpdateDoublesSketch build(final WritableMemory dstMem) {
    return DirectUpdateDoublesSketch.newInstance(bK, dstMem);
  }

  /**
   * Creates a human readable string that describes the current configuration of this builder.
   * @return a human readable summary of this builder's configuration
   */
  @Override
  public String toString() {
    final StringBuilder sb = new StringBuilder();
    // Fixed: previously reported the nonexistent class name "QuantileSketchBuilder".
    sb.append("DoublesSketchBuilder configuration:").append(LS);
    sb.append("K : ").append(TAB).append(bK).append(LS);
    return sb.toString();
  }
}
| 2,808 |
0 | Create_ds/datasketches-java/src/main/java/org/apache/datasketches | Create_ds/datasketches-java/src/main/java/org/apache/datasketches/quantiles/DoublesSketch.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.quantiles;
import static java.lang.Math.max;
import static java.lang.Math.min;
import static org.apache.datasketches.common.Util.ceilingIntPowerOf2;
import static org.apache.datasketches.quantiles.ClassicUtil.MAX_PRELONGS;
import static org.apache.datasketches.quantiles.ClassicUtil.MIN_K;
import static org.apache.datasketches.quantiles.ClassicUtil.checkIsCompactMemory;
import static org.apache.datasketches.quantiles.ClassicUtil.checkK;
import static org.apache.datasketches.quantiles.ClassicUtil.computeNumLevelsNeeded;
import static org.apache.datasketches.quantiles.ClassicUtil.computeRetainedItems;
import static org.apache.datasketches.quantilescommon.QuantilesUtil.equallyWeightedRanks;
import java.util.Random;
import org.apache.datasketches.common.SketchesArgumentException;
import org.apache.datasketches.memory.Memory;
import org.apache.datasketches.memory.WritableMemory;
import org.apache.datasketches.quantilescommon.DoublesSortedView;
import org.apache.datasketches.quantilescommon.QuantileSearchCriteria;
import org.apache.datasketches.quantilescommon.QuantilesAPI;
import org.apache.datasketches.quantilescommon.QuantilesDoublesAPI;
import org.apache.datasketches.quantilescommon.QuantilesDoublesSketchIterator;
/**
* This is an implementation of the Low Discrepancy Mergeable Quantiles Sketch, using doubles,
* described in section 3.2 of the journal version of the paper "Mergeable Summaries"
* by Agarwal, Cormode, Huang, Phillips, Wei, and Yi:
*
 * <p>Reference: <a href="http://dblp.org/rec/html/journals/tods/AgarwalCHPWY13">
 * dblp.org/rec/html/journals/tods/AgarwalCHPWY13</a></p>
*
* <p>A <i>k</i> of 128 produces a normalized, rank error of about 1.7%.
* For example, the median returned from getQuantile(0.5) will be between the actual quantiles
* from the hypothetically sorted array of input quantiles at normalized ranks of 0.483 and 0.517, with
* a confidence of about 99%.</p>
*
* <pre>
Table Guide for DoublesSketch Size in Bytes and Approximate Error:
K => | 16 32 64 128 256 512 1,024
~ Error => | 12.145% 6.359% 3.317% 1.725% 0.894% 0.463% 0.239%
N | Size in Bytes ->
------------------------------------------------------------------------
0 | 8 8 8 8 8 8 8
1 | 72 72 72 72 72 72 72
3 | 72 72 72 72 72 72 72
7 | 104 104 104 104 104 104 104
15 | 168 168 168 168 168 168 168
31 | 296 296 296 296 296 296 296
63 | 424 552 552 552 552 552 552
127 | 552 808 1,064 1,064 1,064 1,064 1,064
255 | 680 1,064 1,576 2,088 2,088 2,088 2,088
511 | 808 1,320 2,088 3,112 4,136 4,136 4,136
1,023 | 936 1,576 2,600 4,136 6,184 8,232 8,232
2,047 | 1,064 1,832 3,112 5,160 8,232 12,328 16,424
4,095 | 1,192 2,088 3,624 6,184 10,280 16,424 24,616
8,191 | 1,320 2,344 4,136 7,208 12,328 20,520 32,808
16,383 | 1,448 2,600 4,648 8,232 14,376 24,616 41,000
32,767 | 1,576 2,856 5,160 9,256 16,424 28,712 49,192
65,535 | 1,704 3,112 5,672 10,280 18,472 32,808 57,384
131,071 | 1,832 3,368 6,184 11,304 20,520 36,904 65,576
262,143 | 1,960 3,624 6,696 12,328 22,568 41,000 73,768
524,287 | 2,088 3,880 7,208 13,352 24,616 45,096 81,960
1,048,575 | 2,216 4,136 7,720 14,376 26,664 49,192 90,152
2,097,151 | 2,344 4,392 8,232 15,400 28,712 53,288 98,344
4,194,303 | 2,472 4,648 8,744 16,424 30,760 57,384 106,536
8,388,607 | 2,600 4,904 9,256 17,448 32,808 61,480 114,728
16,777,215 | 2,728 5,160 9,768 18,472 34,856 65,576 122,920
33,554,431 | 2,856 5,416 10,280 19,496 36,904 69,672 131,112
67,108,863 | 2,984 5,672 10,792 20,520 38,952 73,768 139,304
134,217,727 | 3,112 5,928 11,304 21,544 41,000 77,864 147,496
268,435,455 | 3,240 6,184 11,816 22,568 43,048 81,960 155,688
536,870,911 | 3,368 6,440 12,328 23,592 45,096 86,056 163,880
1,073,741,823 | 3,496 6,696 12,840 24,616 47,144 90,152 172,072
2,147,483,647 | 3,624 6,952 13,352 25,640 49,192 94,248 180,264
4,294,967,295 | 3,752 7,208 13,864 26,664 51,240 98,344 188,456
* </pre>
*
* @see QuantilesAPI
*/
public abstract class DoublesSketch implements QuantilesDoublesAPI {
/**
 * Setting the seed makes the results of the sketch deterministic if the input quantiles are
 * received in exactly the same order. This is only useful when performing test comparisons,
 * otherwise is not recommended.
 */
static Random rand = new Random();

/**
 * Parameter that controls space usage of sketch and accuracy of estimates.
 */
final int k_;

// Cached sorted view used by the query methods; populated via refreshSortedView()
// before each query (see getQuantile, getRank, etc.).
DoublesSketchSortedView classicQdsSV = null;

/**
 * Package-private constructor.
 * @param k controls space usage and accuracy; validated by checkK
 */
DoublesSketch(final int k) {
  checkK(k);
  k_ = k;
}

/**
 * Replaces the shared Random source with one seeded deterministically.
 * Intended for test comparisons only; see the note on {@code rand}.
 * @param seed the given seed
 */
synchronized static void setRandom(final long seed) {
  DoublesSketch.rand = new Random(seed);
}
/**
 * Returns a new builder for configuring and constructing a DoublesSketch.
 * @return a new builder
 */
public static final DoublesSketchBuilder builder() {
  return new DoublesSketchBuilder();
}
/**
 * Heapify takes the sketch image in Memory and instantiates an on-heap Sketch.
 * The resulting sketch will not retain any link to the source Memory.
 * @param srcMem a Memory image of a Sketch.
 * <a href="{@docRoot}/resources/dictionary.html#mem">See Memory</a>
 * @return a heap-based Sketch based on the given Memory
 */
public static DoublesSketch heapify(final Memory srcMem) {
  // dispatch on the image's compact flag: compact and updatable layouts differ
  if (checkIsCompactMemory(srcMem)) {
    return CompactDoublesSketch.heapify(srcMem);
  }
  return UpdateDoublesSketch.heapify(srcMem);
}
/**
 * Wrap this sketch around the given Memory image of a DoublesSketch, compact or updatable.
 * A DirectUpdateDoublesSketch can only wrap an updatable array, and a
 * DirectCompactDoublesSketch can only wrap a compact array.
 *
 * @param srcMem the given Memory image of a DoublesSketch that may have data,
 * @return a sketch that wraps the given srcMem
 */
public static DoublesSketch wrap(final Memory srcMem) {
  // dispatch on the image's compact flag; note the updatable wrap is read-only (R)
  if (checkIsCompactMemory(srcMem)) {
    return DirectCompactDoublesSketch.wrapInstance(srcMem);
  }
  return DirectUpdateDoublesSketchR.wrapInstance(srcMem);
}
@Override
public double[] getCDF(final double[] splitPoints, final QuantileSearchCriteria searchCrit) {
  if (isEmpty()) { throw new IllegalArgumentException(QuantilesAPI.EMPTY_MSG); }
  refreshSortedView(); //ensure classicQdsSV reflects the current sketch state
  return classicQdsSV.getCDF(splitPoints, searchCrit);
}

@Override
public abstract double getMaxItem();

@Override
public abstract double getMinItem();
@Override
public DoublesPartitionBoundaries getPartitionBoundaries(final int numEquallyWeighted,
    final QuantileSearchCriteria searchCrit) {
  if (isEmpty()) { throw new IllegalArgumentException(QuantilesAPI.EMPTY_MSG); }
  final double[] ranks = equallyWeightedRanks(numEquallyWeighted);
  final double[] boundaries = getQuantiles(ranks, searchCrit);
  // pin the extreme boundaries to the exact min and max items of the stream
  boundaries[0] = getMinItem();
  boundaries[boundaries.length - 1] = getMaxItem();
  final DoublesPartitionBoundaries dpb = new DoublesPartitionBoundaries();
  dpb.N = this.getN();
  dpb.ranks = ranks;
  dpb.boundaries = boundaries;
  return dpb;
}
@Override
public double[] getPMF(final double[] splitPoints, final QuantileSearchCriteria searchCrit) {
  if (isEmpty()) { throw new IllegalArgumentException(QuantilesAPI.EMPTY_MSG); }
  refreshSortedView(); //ensure classicQdsSV reflects the current sketch state
  return classicQdsSV.getPMF(splitPoints, searchCrit);
}

@Override
public double getQuantile(final double rank, final QuantileSearchCriteria searchCrit) {
  if (isEmpty()) { throw new IllegalArgumentException(QuantilesAPI.EMPTY_MSG); }
  refreshSortedView(); //ensure classicQdsSV reflects the current sketch state
  return classicQdsSV.getQuantile(rank, searchCrit);
}
@Override
public double[] getQuantiles(final double[] ranks, final QuantileSearchCriteria searchCrit) {
  if (isEmpty()) {
    throw new IllegalArgumentException(QuantilesAPI.EMPTY_MSG);
  }
  refreshSortedView(); //sorted view must be current before querying
  // query the sorted view once per requested rank
  final double[] result = new double[ranks.length];
  int idx = 0;
  for (final double rank : ranks) {
    result[idx++] = classicQdsSV.getQuantile(rank, searchCrit);
  }
  return result;
}
/**
 * {@inheritDoc}
 * The approximate probability that the true quantile is within the confidence interval
 * specified by the upper and lower quantile bounds for this sketch is 0.99.
 */
@Override
public double getQuantileLowerBound(final double rank) {
  // shift the rank down by the normalized rank error, clamped at 0
  return getQuantile(max(0, rank - getNormalizedRankError(k_, false)));
}

/**
 * {@inheritDoc}
 * The approximate probability that the true quantile is within the confidence interval
 * specified by the upper and lower quantile bounds for this sketch is 0.99.
 */
@Override
public double getQuantileUpperBound(final double rank) {
  // shift the rank up by the normalized rank error, clamped at 1
  return getQuantile(min(1.0, rank + getNormalizedRankError(k_, false)));
}
@Override
public double getRank(final double quantile, final QuantileSearchCriteria searchCrit) {
  if (isEmpty()) { throw new IllegalArgumentException(QuantilesAPI.EMPTY_MSG); }
  refreshSortedView(); //ensure classicQdsSV reflects the current sketch state
  return classicQdsSV.getRank(quantile, searchCrit);
}

/**
 * {@inheritDoc}
 * The approximate probability that the true rank is within the confidence interval
 * specified by the upper and lower rank bounds for this sketch is 0.99.
 */
@Override
public double getRankLowerBound(final double rank) {
  return max(0.0, rank - getNormalizedRankError(k_, false));
}

/**
 * {@inheritDoc}
 * The approximate probability that the true rank is within the confidence interval
 * specified by the upper and lower rank bounds for this sketch is 0.99.
 */
@Override
public double getRankUpperBound(final double rank) {
  return min(1.0, rank + getNormalizedRankError(k_, false));
}
@Override
public double[] getRanks(final double[] quantiles, final QuantileSearchCriteria searchCrit) {
  if (isEmpty()) {
    throw new IllegalArgumentException(QuantilesAPI.EMPTY_MSG);
  }
  refreshSortedView(); //sorted view must be current before querying
  // query the sorted view once per requested quantile
  final double[] result = new double[quantiles.length];
  int idx = 0;
  for (final double quantile : quantiles) {
    result[idx++] = classicQdsSV.getRank(quantile, searchCrit);
  }
  return result;
}
@Override
public int getK() {
  return k_;
}

@Override
public abstract long getN();
/**
 * Gets the approximate rank error of this sketch normalized as a fraction between zero and one.
 * The epsilon returned is a best fit to 99 percent confidence empirically measured max error
 * in thousands of trials.
 * @param pmf if true, returns the "double-sided" normalized rank error for the getPMF() function.
 * Otherwise, it is the "single-sided" normalized rank error for all the other queries.
 * @return if pmf is true, returns the normalized rank error for the getPMF() function.
 * Otherwise, it is the "single-sided" normalized rank error for all the other queries.
 */
public double getNormalizedRankError(final boolean pmf) {
  return getNormalizedRankError(k_, pmf); //delegates to the static version with this sketch's k
}

/**
 * Gets the normalized rank error given k and pmf.
 * Static method version of the <i>getNormalizedRankError(boolean)</i>.
 * The epsilon returned is a best fit to 99 percent confidence empirically measured max error
 * in thousands of trials.
 * @param k the configuration parameter
 * @param pmf if true, returns the "double-sided" normalized rank error for the getPMF() function.
 * Otherwise, it is the "single-sided" normalized rank error for all the other queries.
 * @return if pmf is true, the normalized rank error for the getPMF() function.
 * Otherwise, it is the "single-sided" normalized rank error for all the other queries.
 */
public static double getNormalizedRankError(final int k, final boolean pmf) {
  return ClassicUtil.getNormalizedRankError(k, pmf);
}
/**
* Gets the approximate <em>k</em> to use given epsilon, the normalized rank error.
* @param epsilon the normalized rank error between zero and one.
* @param pmf if true, this function returns <em>k</em> assuming the input epsilon
* is the desired "double-sided" epsilon for the getPMF() function. Otherwise, this function
* returns <em>k</em> assuming the input epsilon is the desired "single-sided"
* epsilon for all the other queries.
* @return <i>k</i> given epsilon.
*/
public static int getKFromEpsilon(final double epsilon, final boolean pmf) {
return ClassicUtil.getKFromEpsilon(epsilon, pmf);
}
@Override
public abstract boolean hasMemory();
@Override
public abstract boolean isDirect();
@Override
public boolean isEmpty() {
return getN() == 0;
}
@Override
public boolean isEstimationMode() {
return getN() >= 2L * k_;
}
@Override
public abstract boolean isReadOnly();
  /**
   * Returns true if the backing resource of <i>this</i> is identical with the backing resource
   * of <i>that</i>. The capacities must be the same. If <i>this</i> is a region,
   * the region offset must also be the same.
   * @param that A different non-null object
   * @return true if the backing resource of <i>this</i> is the same as the backing resource
   * of <i>that</i>.
   */
  public boolean isSameResource(final Memory that) { //Overridden by direct sketches
    return false; //this default applies to heap sketches, which have no backing Memory
  }
@Override
public byte[] toByteArray() {
if (isCompact()) {
return toByteArray(true);
}
return toByteArray(false);
}
  /**
   * Serialize this sketch in a byte array form.
   * @param compact if true the sketch will be serialized in compact form.
   * DirectCompactDoublesSketch can wrap() only a compact byte array;
   * DirectUpdateDoublesSketch can wrap() only an updatable byte array.
   * @return this sketch in a byte array form.
   */
  public byte[] toByteArray(final boolean compact) {
    //passes 'compact' for both of DoublesByteArrayImpl's flags; see that class for their meanings
    return DoublesByteArrayImpl.toByteArray(this, compact, compact);
  }
  @Override
  public String toString() {
    return toString(true, false); //summary only, no data detail
  }
  /**
   * Returns summary information about this sketch. Used for debugging.
   * @param sketchSummary if true includes sketch summary
   * @param dataDetail if true includes data detail
   * @return summary information about the sketch.
   */
  public String toString(final boolean sketchSummary, final boolean dataDetail) {
    return DoublesUtil.toString(sketchSummary, dataDetail, this);
  }
  /**
   * Returns a human readable string of the preamble of a byte array image of a DoublesSketch.
   * @param byteArr the given byte array
   * @return a human readable string of the preamble of a byte array image of a DoublesSketch.
   */
  public static String toString(final byte[] byteArr) {
    return PreambleUtil.toString(byteArr, true); //true: interpret as a DoublesSketch image
  }
  /**
   * Returns a human readable string of the preamble of a Memory image of a DoublesSketch.
   * @param mem the given Memory
   * @return a human readable string of the preamble of a Memory image of a DoublesSketch.
   */
  public static String toString(final Memory mem) {
    return PreambleUtil.toString(mem, true); //true: interpret as a DoublesSketch image
  }
  /**
   * From a source sketch, create a new sketch that must have a smaller K.
   * The original sketch is not modified.
   *
   * @param srcSketch the sourcing sketch
   * @param smallerK the new sketch's K that must be smaller than this K.
   * It is required that this.getK() = smallerK * 2^(nonnegative integer).
   * @param dstMem the destination Memory. It must not overlap the Memory of this sketch.
   * If null, a heap sketch will be returned, otherwise it will be off-heap.
   *
   * @return the new sketch.
   */
  public DoublesSketch downSample(final DoublesSketch srcSketch, final int smallerK,
      final WritableMemory dstMem) {
    return downSampleInternal(srcSketch, smallerK, dstMem);
  }
  @Override
  public int getNumRetained() {
    return computeRetainedItems(getK(), getN()); //count of quantile items currently retained
  }
  /**
   * Returns the current number of bytes this sketch would require to store in the compact Memory Format.
   * @return the current number of bytes this sketch would require to store in the compact Memory Format.
   */
  public int getCurrentCompactSerializedSizeBytes() {
    return getCompactSerialiedSizeBytes(getK(), getN());
  }
/**
* Returns the number of bytes a DoublesSketch would require to store in compact form
* given <i>k</i> and <i>n</i>. The compact form is not updatable.
* @param k the size configuration parameter for the sketch
* @param n the number of quantiles input into the sketch
* @return the number of bytes required to store this sketch in compact form.
*/
public static int getCompactSerialiedSizeBytes(final int k, final long n) {
if (n == 0) { return 8; }
final int metaPreLongs = MAX_PRELONGS + 2; //plus min, max
return metaPreLongs + computeRetainedItems(k, n) << 3;
}
  @Override
  public int getSerializedSizeBytes() {
    //size depends on whether this sketch serializes in compact or updatable form
    if (isCompact()) { return getCurrentCompactSerializedSizeBytes(); }
    return getCurrentUpdatableSerializedSizeBytes();
  }
  /**
   * Returns the current number of bytes this sketch would require to store in the updatable Memory Format.
   * @return the current number of bytes this sketch would require to store in the updatable Memory Format.
   */
  public int getCurrentUpdatableSerializedSizeBytes() {
    return getUpdatableStorageBytes(getK(), getN());
  }
/**
* Returns the number of bytes a sketch would require to store in updatable form.
* This uses roughly 2X the storage of the compact form
* given <i>k</i> and <i>n</i>.
* @param k the size configuration parameter for the sketch
* @param n the number of quantiles input into the sketch
* @return the number of bytes this sketch would require to store in updatable form.
*/
public static int getUpdatableStorageBytes(final int k, final long n) {
if (n == 0) { return 8; }
final int metaPre = MAX_PRELONGS + 2; //plus min, max
final int totLevels = computeNumLevelsNeeded(k, n);
if (n <= k) {
final int ceil = Math.max(ceilingIntPowerOf2((int)n), MIN_K * 2);
return metaPre + ceil << 3;
}
return metaPre + (2 + totLevels) * k << 3;
}
  /**
   * Puts the current sketch into the given Memory in compact form if there is sufficient space,
   * otherwise, it throws an error.
   *
   * @param dstMem the given memory.
   */
  public void putMemory(final WritableMemory dstMem) {
    putMemory(dstMem, true);
  }
  /**
   * Puts the current sketch into the given Memory if there is sufficient space, otherwise,
   * throws an error.
   *
   * @param dstMem the given memory.
   * @param compact if true, compacts and sorts the base buffer, which optimizes merge
   * performance at the cost of slightly increased serialization time.
   * @throws SketchesArgumentException if dstMem is too small for the serialized image.
   */
  public void putMemory(final WritableMemory dstMem, final boolean compact) {
    if (hasMemory() && isCompact() == compact) {
      //fast path: the backing Memory is already in the requested form; copy bytes directly
      final Memory srcMem = getMemory();
      srcMem.copyTo(0, dstMem, 0, getSerializedSizeBytes());
    } else {
      //serialize to a byte array in the requested form, then copy it into dstMem
      final byte[] byteArr = toByteArray(compact);
      final int arrLen = byteArr.length;
      final long memCap = dstMem.getCapacity();
      if (memCap < arrLen) {
        throw new SketchesArgumentException(
            "Destination Memory not large enough: " + memCap + " < " + arrLen);
      }
      dstMem.putByteArray(0, byteArr, 0, arrLen);
    }
  }
  @Override
  public QuantilesDoublesSketchIterator iterator() {
    return new DoublesSketchIterator(this, getBitPattern());
  }
  @Override
  public DoublesSortedView getSortedView() {
    return new DoublesSketchSortedView(this); //constructs a fresh sorted view on each call
  }
/**
* {@inheritDoc}
* <p>The parameter <i>k</i> will not change.</p>
*/
@Override
public abstract void reset();
//Restricted
/*
* DoublesMergeImpl.downSamplingMergeInto requires the target sketch to implement update(), so
* we ensure that the target is an UpdateSketch. The public API, on the other hand, just
* specifies a DoublesSketch. This lets us be more specific about the type without changing the
* public API.
*/
  UpdateDoublesSketch downSampleInternal(final DoublesSketch srcSketch, final int smallerK,
                                         final WritableMemory dstMem) {
    //dstMem == null => heap target sketch; otherwise a direct (off-heap) target sketch
    final UpdateDoublesSketch newSketch = dstMem == null
        ? HeapUpdateDoublesSketch.newInstance(smallerK)
        : DirectUpdateDoublesSketch.newInstance(smallerK, dstMem);
    if (srcSketch.isEmpty()) { return newSketch; } //nothing to merge
    DoublesMergeImpl.downSamplingMergeInto(srcSketch, newSketch);
    return newSketch;
  }
private final void refreshSortedView() {
classicQdsSV = (classicQdsSV == null) ? new DoublesSketchSortedView(this) : classicQdsSV;
}
//Restricted abstract
/**
* Returns true if this sketch is compact
* @return true if this sketch is compact
*/
abstract boolean isCompact();
/**
* Returns the base buffer count
* @return the base buffer count
*/
abstract int getBaseBufferCount();
/**
* Returns the bit pattern for valid log levels
* @return the bit pattern for valid log levels
*/
abstract long getBitPattern();
/**
* Returns the capacity for the combined base buffer
* @return the capacity for the combined base buffer
*/
abstract int getCombinedBufferItemCapacity();
/**
* Returns the combined buffer, in non-compact form.
* @return the combined buffer, in non-compact form.
*/
abstract double[] getCombinedBuffer();
/**
* Gets the Memory if it exists, otherwise returns null.
* @return the Memory if it exists, otherwise returns null.
*/
abstract WritableMemory getMemory();
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.quantiles;
import static java.lang.System.arraycopy;
import static org.apache.datasketches.quantiles.DoublesSketchAccessor.BB_LVL_IDX;
import static org.apache.datasketches.quantilescommon.QuantileSearchCriteria.INCLUSIVE;
import java.util.Arrays;
import org.apache.datasketches.common.SketchesStateException;
import org.apache.datasketches.quantilescommon.DoublesSortedView;
import org.apache.datasketches.quantilescommon.InequalitySearch;
import org.apache.datasketches.quantilescommon.QuantileSearchCriteria;
import org.apache.datasketches.quantilescommon.QuantilesAPI;
import org.apache.datasketches.quantilescommon.QuantilesUtil;
/**
* The SortedView of the Classic Quantiles DoublesSketch.
* @author Alexander Saydakov
* @author Lee Rhodes
*/
public final class DoublesSketchSortedView implements DoublesSortedView {
private final double[] quantiles;
private final long[] cumWeights; //comes in as individual weights, converted to cumulative natural weights
private final long totalN;
  /**
   * Construct from elements for testing.
   * @param quantiles sorted array of quantiles
   * @param cumWeights sorted, monotonically increasing cumulative weights.
   * @param totalN the total number of items presented to the sketch.
   */
  DoublesSketchSortedView(final double[] quantiles, final long[] cumWeights, final long totalN) {
    //NOTE: the arrays are stored by reference, not copied; callers must not modify them afterwards
    this.quantiles = quantiles;
    this.cumWeights = cumWeights;
    this.totalN = totalN;
  }
  /**
   * Constructs this Sorted View given the sketch
   * @param sketch the given Classic Quantiles DoublesSketch
   */
  public DoublesSketchSortedView(final DoublesSketch sketch) {
    this.totalN = sketch.getN();
    final int k = sketch.getK();
    final int numQuantiles = sketch.getNumRetained();
    quantiles = new double[numQuantiles];
    cumWeights = new long[numQuantiles];
    final DoublesSketchAccessor sketchAccessor = DoublesSketchAccessor.wrap(sketch);
    // Populate from DoublesSketch:
    // copy over the "levels" and then the base buffer, all with appropriate weights
    populateFromDoublesSketch(k, totalN, sketch.getBitPattern(), sketchAccessor, quantiles, cumWeights);
    // Sort the first "numSamples" slots of the two arrays in tandem,
    // taking advantage of the already sorted blocks of length k
    blockyTandemMergeSort(quantiles, cumWeights, numQuantiles, k);
    // Convert individual weights to cumulative weights; the final total must equal n.
    if (convertToCumulative(cumWeights) != totalN) {
      throw new SketchesStateException("Sorted View is misconfigured. TotalN does not match cumWeights.");
    }
  }
  @Override
  public double getQuantile(final double rank, final QuantileSearchCriteria searchCrit) {
    if (isEmpty()) { throw new IllegalArgumentException(QuantilesAPI.EMPTY_MSG); }
    QuantilesUtil.checkNormalizedRankBounds(rank);
    final int len = cumWeights.length;
    // Convert normalized rank to a natural rank in [0, totalN]:
    // INCLUSIVE rounds up, EXCLUSIVE rounds down.
    final long naturalRank = (searchCrit == INCLUSIVE)
        ? (long)Math.ceil(rank * totalN) : (long)Math.floor(rank * totalN);
    final InequalitySearch crit = (searchCrit == INCLUSIVE) ? InequalitySearch.GE : InequalitySearch.GT;
    final int index = InequalitySearch.find(cumWeights, 0, len - 1, naturalRank, crit);
    if (index == -1) {
      return quantiles[quantiles.length - 1]; //EXCLUSIVE (GT) case: normRank == 1.0;
    }
    return quantiles[index];
  }
  @Override
  public double getRank(final double quantile, final QuantileSearchCriteria searchCrit) {
    if (isEmpty()) { throw new IllegalArgumentException(QuantilesAPI.EMPTY_MSG); }
    final int len = quantiles.length;
    // INCLUSIVE counts items <= quantile (LE); EXCLUSIVE counts items < quantile (LT).
    final InequalitySearch crit = (searchCrit == INCLUSIVE) ? InequalitySearch.LE : InequalitySearch.LT;
    final int index = InequalitySearch.find(quantiles, 0, len - 1, quantile, crit);
    if (index == -1) {
      return 0; //EXCLUSIVE (LT) case: quantile <= minQuantile; INCLUSIVE (LE) case: quantile < minQuantile
    }
    return (double)cumWeights[index] / totalN; //normalize the natural rank to [0, 1]
  }
  @Override
  public long[] getCumulativeWeights() {
    return cumWeights.clone(); //defensive copy protects internal state
  }
  @Override
  public double[] getQuantiles() {
    return quantiles.clone(); //defensive copy protects internal state
  }
  @Override
  public boolean isEmpty() {
    return totalN == 0;
  }
  @Override
  public DoublesSketchSortedViewIterator iterator() {
    //the iterator shares the internal arrays; it must not modify them
    return new DoublesSketchSortedViewIterator(quantiles, cumWeights);
  }
  //restricted methods
  /**
   * Populate the arrays and registers from a DoublesSketch
   * @param k K parameter of the sketch
   * @param n The current size of the stream
   * @param bitPattern the bit pattern for valid log levels
   * @param sketchAccessor A DoublesSketchAccessor around the sketch
   * @param quantilesArr the consolidated array of all items from the sketch
   * @param cumWtsArr populates this array with the raw individual weights from the sketch,
   * it will be cumulative later.
   */
  private final static void populateFromDoublesSketch(
          final int k, final long n, final long bitPattern,
          final DoublesSketchAccessor sketchAccessor,
          final double[] quantilesArr, final long[] cumWtsArr) {
    long weight = 1;
    int nxt = 0;
    long bits = bitPattern;
    assert bits == (n / (2L * k)); // internal consistency check
    //scan levels from lowest to highest; level lvl contributes k items each of weight 2^(lvl+1)
    for (int lvl = 0; bits != 0L; lvl++, bits >>>= 1) {
      weight *= 2;
      if ((bits & 1L) > 0L) { //this level is present per the bit pattern
        sketchAccessor.setLevel(lvl);
        for (int i = 0; i < sketchAccessor.numItems(); i++) {
          quantilesArr[nxt] = sketchAccessor.get(i);
          cumWtsArr[nxt] = weight;
          nxt++;
        }
      }
    }
    weight = 1; //NOT a mistake! We just copied the highest level; now we need to copy the base buffer
    final int startOfBaseBufferBlock = nxt;
    // Copy BaseBuffer over, along with weight = 1
    sketchAccessor.setLevel(BB_LVL_IDX);
    for (int i = 0; i < sketchAccessor.numItems(); i++) {
      quantilesArr[nxt] = sketchAccessor.get(i);
      cumWtsArr[nxt] = weight;
      nxt++;
    }
    assert nxt == quantilesArr.length;
    // Must sort the items that came from the base buffer.
    // Don't need to sort the corresponding weights because they are all the same.
    final int numSamples = nxt;
    Arrays.sort(quantilesArr, startOfBaseBufferBlock, numSamples);
  }
  /**
   * blockyTandemMergeSort() is an implementation of top-down merge sort specialized
   * for the case where the input contains successive equal-length blocks
   * that have already been sorted, so that only the top part of the
   * merge tree remains to be executed. Also, two arrays are sorted in tandem,
   * as discussed below.
   * @param quantiles array of quantiles
   * @param cumWts array of cum weights
   * @param arrLen length of quantiles array and cumWts array
   * @param blkSize size of internal sorted blocks
   */
  //used by this and UtilTest
  static void blockyTandemMergeSort(final double[] quantiles, final long[] cumWts, final int arrLen,
      final int blkSize) {
    assert blkSize >= 1;
    if (arrLen <= blkSize) { return; } //a single (possibly short) block is already sorted
    //ceiling division: number of pre-sorted blocks; the final block may be shorter than blkSize
    int numblks = arrLen / blkSize;
    if ((numblks * blkSize) < arrLen) { numblks += 1; }
    assert ((numblks * blkSize) >= arrLen);
    // duplication of the input arrays is preparation for the "ping-pong" copy reduction strategy.
    final double[] qSrc = Arrays.copyOf(quantiles, arrLen);
    final long[] cwSrc = Arrays.copyOf(cumWts, arrLen);
    blockyTandemMergeSortRecursion(qSrc, cwSrc,
        quantiles, cumWts,
        0, numblks,
        blkSize, arrLen);
  }
  /**
   * blockyTandemMergeSortRecursion() is called by blockyTandemMergeSort().
   * In addition to performing the algorithm's top down recursion,
   * it manages the buffer swapping that eliminates most copying.
   * It also maps the input's pre-sorted blocks into the subarrays
   * that are processed by tandemMerge().
   * @param qSrc source array of quantiles
   * @param cwSrc source weights array
   * @param qDst destination quantiles array
   * @param cwDst destination weights array
   * @param grpStart group start, refers to pre-sorted blocks such as block 0, block 1, etc.
   * @param grpLen group length, refers to pre-sorted blocks such as block 0, block 1, etc.
   * @param blkSize block size
   * @param arrLim array limit
   */
  private static void blockyTandemMergeSortRecursion(final double[] qSrc, final long[] cwSrc,
      final double[] qDst, final long[] cwDst, final int grpStart, final int grpLen,
      /* indices of blocks */ final int blkSize, final int arrLim) {
    // Important note: grpStart and grpLen do NOT refer to positions in the underlying array.
    // Instead, they refer to the pre-sorted blocks, such as block 0, block 1, etc.
    assert (grpLen > 0);
    if (grpLen == 1) { return; } //a single pre-sorted block needs no merging
    final int grpLen1 = grpLen / 2;
    final int grpLen2 = grpLen - grpLen1;
    assert (grpLen1 >= 1);
    assert (grpLen2 >= grpLen1);
    final int grpStart1 = grpStart;
    final int grpStart2 = grpStart + grpLen1;
    //swap roles of src and dst
    blockyTandemMergeSortRecursion(qDst, cwDst,
        qSrc, cwSrc,
        grpStart1, grpLen1, blkSize, arrLim);
    //swap roles of src and dst
    blockyTandemMergeSortRecursion(qDst, cwDst,
        qSrc, cwSrc,
        grpStart2, grpLen2, blkSize, arrLim);
    // here we convert indices of blocks into positions in the underlying array.
    final int arrStart1 = grpStart1 * blkSize;
    final int arrStart2 = grpStart2 * blkSize;
    final int arrLen1 = grpLen1 * blkSize;
    int arrLen2 = grpLen2 * blkSize;
    // special case for the final block which might be shorter than blkSize.
    if ((arrStart2 + arrLen2) > arrLim) { arrLen2 = arrLim - arrStart2; }
    tandemMerge(qSrc, cwSrc,
        arrStart1, arrLen1,
        arrStart2, arrLen2,
        qDst, cwDst,
        arrStart1); // which will be arrStart3
  }
/**
* Performs two merges in tandem. One of them provides the sort keys
* while the other one passively undergoes the same data motion.
* @param qSrc quantiles source
* @param cwSrc cumulative weights source
* @param arrStart1 Array 1 start offset
* @param arrLen1 Array 1 length
* @param arrStart2 Array 2 start offset
* @param arrLen2 Array 2 length
* @param qDst quantiles destination
* @param cwDst cumulative weights destination
* @param arrStart3 Array 3 start offset
*/
private static void tandemMerge(final double[] qSrc, final long[] cwSrc,
final int arrStart1, final int arrLen1,
final int arrStart2, final int arrLen2,
final double[] qDst, final long[] cwDst,
final int arrStart3) {
final int arrStop1 = arrStart1 + arrLen1;
final int arrStop2 = arrStart2 + arrLen2;
int i1 = arrStart1;
int i2 = arrStart2;
int i3 = arrStart3;
while ((i1 < arrStop1) && (i2 < arrStop2)) {
if (qSrc[i2] < qSrc[i1]) {
qDst[i3] = qSrc[i2];
cwDst[i3] = cwSrc[i2];
i2++;
} else {
qDst[i3] = qSrc[i1];
cwDst[i3] = cwSrc[i1];
i1++;
}
i3++;
}
if (i1 < arrStop1) {
arraycopy(qSrc, i1, qDst, i3, arrStop1 - i1);
arraycopy(cwSrc, i1, cwDst, i3, arrStop1 - i1);
} else {
assert i2 < arrStop2;
arraycopy(qSrc, i2, qDst, i3, arrStop2 - i2);
arraycopy(cwSrc, i2, cwDst, i3, arrStop2 - i2);
}
}
/**
* Convert the individual weights into cumulative weights.
* An array of {1,1,1,1} becomes {1,2,3,4}
* @param array of actual weights from the sketch, none of the weights may be zero
* @return total weight
*/
private static long convertToCumulative(final long[] array) {
long subtotal = 0;
for (int i = 0; i < array.length; i++) {
final long newSubtotal = subtotal + array[i];
subtotal = array[i] = newSubtotal;
}
return subtotal;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.quantiles;
import static org.apache.datasketches.quantiles.ClassicUtil.MIN_K;
import static org.apache.datasketches.quantiles.ClassicUtil.checkFamilyID;
import static org.apache.datasketches.quantiles.ClassicUtil.checkHeapFlags;
import static org.apache.datasketches.quantiles.ClassicUtil.computeBaseBufferItems;
import static org.apache.datasketches.quantiles.ClassicUtil.computeBitPattern;
import static org.apache.datasketches.quantiles.ClassicUtil.computeCombinedBufferItemCapacity;
import static org.apache.datasketches.quantiles.ClassicUtil.computeNumLevelsNeeded;
import static org.apache.datasketches.quantiles.ClassicUtil.computeRetainedItems;
import static org.apache.datasketches.quantiles.PreambleUtil.COMPACT_FLAG_MASK;
import static org.apache.datasketches.quantiles.PreambleUtil.EMPTY_FLAG_MASK;
import static org.apache.datasketches.quantiles.PreambleUtil.MAX_DOUBLE;
import static org.apache.datasketches.quantiles.PreambleUtil.MIN_DOUBLE;
import static org.apache.datasketches.quantiles.PreambleUtil.extractFamilyID;
import static org.apache.datasketches.quantiles.PreambleUtil.extractFlags;
import static org.apache.datasketches.quantiles.PreambleUtil.extractK;
import static org.apache.datasketches.quantiles.PreambleUtil.extractN;
import static org.apache.datasketches.quantiles.PreambleUtil.extractPreLongs;
import static org.apache.datasketches.quantiles.PreambleUtil.extractSerVer;
import java.util.Arrays;
import org.apache.datasketches.common.Family;
import org.apache.datasketches.common.SketchesArgumentException;
import org.apache.datasketches.memory.Memory;
import org.apache.datasketches.memory.WritableMemory;
import org.apache.datasketches.quantilescommon.QuantilesAPI;
/**
* Implements the DoublesSketch on the Java heap.
*
* @author Lee Rhodes
* @author Jon Malkin
*/
final class HeapUpdateDoublesSketch extends UpdateDoublesSketch {
static final int MIN_HEAP_DOUBLES_SER_VER = 1;
/**
* The smallest item ever seen in the stream.
*/
private double minItem_;
/**
* The largest item ever seen in the stream.
*/
private double maxItem_;
/**
* The total count of items seen.
*/
private long n_;
/**
* Number of items currently in base buffer.
*
* <p>Count = N % (2*K)</p>
*/
private int baseBufferCount_;
/**
* Active levels expressed as a bit pattern.
*
* <p>Pattern = N / (2 * K)</p>
*/
private long bitPattern_;
/**
* This single array contains the base buffer plus all levels some of which may not be used,
* i.e, is in non-compact form.
* A level is of size K and is either full and sorted, or not used. A "not used" buffer may have
* garbage. Whether a level buffer used or not is indicated by the bitPattern_.
* The base buffer has length 2*K but might not be full and isn't necessarily sorted.
* The base buffer precedes the level buffers. This buffer does not include the min, max items.
*
* <p>The levels arrays require quite a bit of explanation, which we defer until later.</p>
*/
private double[] combinedBuffer_;
  //**CONSTRUCTORS**********************************************************
  //Private: instances are obtained via newInstance() or heapifyInstance().
  private HeapUpdateDoublesSketch(final int k) {
    super(k); //Checks k
  }
  /**
   * Obtains a new on-heap instance of a DoublesSketch.
   *
   * @param k Parameter that controls space usage of sketch and accuracy of estimates.
   * Must be greater than 1 and less than 65536 and a power of 2.
   * @return a HeapUpdateDoublesSketch
   */
  static HeapUpdateDoublesSketch newInstance(final int k) {
    final HeapUpdateDoublesSketch hqs = new HeapUpdateDoublesSketch(k);
    final int baseBufAlloc = 2 * Math.min(MIN_K, k); //the min is important
    hqs.n_ = 0;
    hqs.combinedBuffer_ = new double[baseBufAlloc]; //grows on demand as items arrive
    hqs.baseBufferCount_ = 0;
    hqs.bitPattern_ = 0; //no levels exist yet
    hqs.minItem_ = Double.NaN; //NaN marks "no items seen yet"
    hqs.maxItem_ = Double.NaN;
    return hqs;
  }
  /**
   * Heapifies the given srcMem, which must be a Memory image of a DoublesSketch and may have data.
   *
   * @param srcMem a Memory image of a sketch, which may be in compact or not compact form.
   * <a href="{@docRoot}/resources/dictionary.html#mem">See Memory</a>
   * @return a DoublesSketch on the Java heap.
   */
  static HeapUpdateDoublesSketch heapifyInstance(final Memory srcMem) {
    final long memCapBytes = srcMem.getCapacity();
    if (memCapBytes < 8) { //must hold at least the first preamble long
      throw new SketchesArgumentException("Source Memory too small: " + memCapBytes + " < 8");
    }
    //extract the preamble fields
    final int preLongs = extractPreLongs(srcMem);
    final int serVer = extractSerVer(srcMem);
    final int familyID = extractFamilyID(srcMem);
    final int flags = extractFlags(srcMem);
    final int k = extractK(srcMem);
    final boolean empty = (flags & EMPTY_FLAG_MASK) > 0; //Preamble flags empty state
    final long n = empty ? 0 : extractN(srcMem);
    //VALIDITY CHECKS
    DoublesUtil.checkDoublesSerVer(serVer, MIN_HEAP_DOUBLES_SER_VER);
    checkHeapFlags(flags);
    checkPreLongsFlagsSerVer(flags, serVer, preLongs);
    checkFamilyID(familyID);
    final HeapUpdateDoublesSketch hds = newInstance(k); //checks k
    if (empty) { return hds; }
    //Not empty, must have valid preamble + min, max, n.
    //Forward compatibility from SerVer = 1 :
    final boolean srcIsCompact = (serVer == 2) | ((flags & COMPACT_FLAG_MASK) > 0);
    checkHeapMemCapacity(k, n, srcIsCompact, serVer, memCapBytes);
    //set class members by computing them
    hds.n_ = n;
    final int combBufCap = computeCombinedBufferItemCapacity(k, n);
    hds.baseBufferCount_ = computeBaseBufferItems(k, n);
    hds.bitPattern_ = computeBitPattern(k, n);
    //Extract min, max, data from srcMem into Combined Buffer
    hds.srcMemoryToCombinedBuffer(srcMem, serVer, srcIsCompact, combBufCap);
    return hds;
  }
@Override
public double getMaxItem() {
if (isEmpty()) { throw new IllegalArgumentException(QuantilesAPI.EMPTY_MSG); }
return maxItem_;
}
@Override
public double getMinItem() {
if (isEmpty()) { throw new IllegalArgumentException(QuantilesAPI.EMPTY_MSG); }
return minItem_;
}
  @Override
  public long getN() {
    return n_; //total items presented to the sketch
  }
  @Override
  public boolean hasMemory() {
    return false; //heap implementation: no backing Memory
  }
  @Override
  public boolean isDirect() {
    return false; //always on-heap
  }
  @Override
  public boolean isReadOnly() {
    return false; //heap update sketch is always writable
  }
  @Override
  public void reset() {
    //returns the sketch to its initial empty state; k is unchanged
    n_ = 0;
    final int combinedBufferItemCapacity = 2 * Math.min(MIN_K, k_); //min is important
    combinedBuffer_ = new double[combinedBufferItemCapacity];
    baseBufferCount_ = 0;
    bitPattern_ = 0;
    minItem_ = Double.NaN; //NaN marks "no items seen yet"
    maxItem_ = Double.NaN;
  }
  @Override
  public void update(final double dataItem) {
    if (Double.isNaN(dataItem)) { return; } //NaN items are silently ignored
    //maintain the min/max registers
    if (n_ == 0) {
      putMaxItem(dataItem);
      putMinItem(dataItem);
    } else {
      if (dataItem > getMaxItem()) { putMaxItem(dataItem); }
      if (dataItem < getMinItem()) { putMinItem(dataItem); }
    }
    //don't increment n_ and baseBufferCount_ yet
    final int curBBCount = baseBufferCount_;
    final int newBBCount = curBBCount + 1;
    final long newN = n_ + 1;
    final int combBufItemCap = combinedBuffer_.length;
    if (newBBCount > combBufItemCap) {
      growBaseBuffer(); //only changes combinedBuffer when it is only a base buffer
    }
    //put the new item in the base buffer
    combinedBuffer_[curBBCount] = dataItem;
    if (newBBCount == (k_ << 1)) { //Propagate: the base buffer is full (2k items)
      // make sure there will be enough space (levels) for the propagation
      final int spaceNeeded = DoublesUpdateImpl.getRequiredItemCapacity(k_, newN);
      if (spaceNeeded > combBufItemCap) {
        // copies base buffer plus old levels, adds space for new level
        growCombinedBuffer(combBufItemCap, spaceNeeded);
      }
      // sort only the (full) base buffer via accessor which modifies the underlying base buffer,
      // then use as one of the inputs to propagate-carry
      final DoublesSketchAccessor bbAccessor = DoublesSketchAccessor.wrap(this, true);
      bbAccessor.sort();
      final long newBitPattern = DoublesUpdateImpl.inPlacePropagateCarry(
          0, // starting level
          null,
          bbAccessor,
          true,
          k_,
          DoublesSketchAccessor.wrap(this, true),
          bitPattern_
      );
      assert newBitPattern == computeBitPattern(k_, newN); // internal consistency check
      assert newBitPattern == (bitPattern_ + 1);
      bitPattern_ = newBitPattern;
      baseBufferCount_ = 0; //base buffer was consumed by the propagation
    } else {
      //bitPattern unchanged
      baseBufferCount_ = newBBCount;
    }
    n_ = newN;
    classicQdsSV = null; //invalidate any cached sorted view
  }
  /**
   * Loads the Combined Buffer, min and max from the given source Memory.
   * The resulting Combined Buffer is always in non-compact form and must be pre-allocated.
   * @param srcMem the given source Memory
   * @param serVer the serialization version of the source
   * @param srcIsCompact true if the given source Memory is in compact form
   * @param combBufCap total items for the combined buffer (size in doubles)
   */
  private void srcMemoryToCombinedBuffer(final Memory srcMem, final int serVer,
                                         final boolean srcIsCompact, final int combBufCap) {
    final int preLongs = 2;
    final int extra = (serVer == 1) ? 3 : 2; // space for min and max quantiles, buf alloc (SerVer 1)
    final int preBytes = (preLongs + extra) << 3;
    final int bbCnt = baseBufferCount_;
    final int k = getK();
    final long n = getN();
    final double[] combinedBuffer = new double[combBufCap]; //always non-compact
    //Load min, max
    putMinItem(srcMem.getDouble(MIN_DOUBLE));
    putMaxItem(srcMem.getDouble(MAX_DOUBLE));
    if (srcIsCompact) {
      //Load base buffer
      srcMem.getDoubleArray(preBytes, combinedBuffer, 0, bbCnt);
      //Load levels from compact srcMem: present levels are stored contiguously in the source
      //but must be placed at their fixed k-sized slots in the non-compact combined buffer.
      long bitPattern = bitPattern_;
      if (bitPattern != 0) {
        long memOffset = preBytes + (bbCnt << 3);
        int combBufOffset = 2 * k; //levels begin after the 2k base buffer slots
        while (bitPattern != 0L) {
          if ((bitPattern & 1L) > 0L) { //this level is present in the source
            srcMem.getDoubleArray(memOffset, combinedBuffer, combBufOffset, k);
            memOffset += (k << 3); //bytes, increment compactly
          }
          combBufOffset += k; //doubles, increment every level
          bitPattern >>>= 1;
        }
      }
    } else { //srcMem not compact: base buffer and all level slots copy over directly
      final int levels = computeNumLevelsNeeded(k, n);
      final int totItems = (levels == 0) ? bbCnt : (2 + levels) * k;
      srcMem.getDoubleArray(preBytes, combinedBuffer, 0, totItems);
    }
    putCombinedBuffer(combinedBuffer);
  }
  //Restricted overrides
  //Gets
  @Override
  int getBaseBufferCount() {
    return baseBufferCount_;
  }
  @Override
  int getCombinedBufferItemCapacity() {
    return combinedBuffer_.length;
  }
  @Override
  double[] getCombinedBuffer() {
    return combinedBuffer_; //returns the live array, not a copy
  }
  @Override
  long getBitPattern() {
    return bitPattern_;
  }
  @Override
  WritableMemory getMemory() {
    return null; //heap sketch has no backing Memory
  }
//Puts
@Override
void putMinItem(final double minItem) {
minItem_ = minItem;
}
@Override
void putMaxItem(final double maxItem) {
maxItem_ = maxItem;
}
@Override
void putN(final long n) {
n_ = n;
}
@Override
void putCombinedBuffer(final double[] combinedBuffer) {
combinedBuffer_ = combinedBuffer;
}
  /** Sets the count of items in the base buffer. */
  @Override
  void putBaseBufferCount(final int baseBufferCount) {
    baseBufferCount_ = baseBufferCount;
  }
  /** Sets the bit pattern of valid levels. */
  @Override
  void putBitPattern(final long bitPattern) {
    bitPattern_ = bitPattern;
  }
  /**
   * Grows the combined buffer to spaceNeeded items, preserving existing content.
   * NOTE(review): currentSpace is unused here; presumably kept for interface compatibility
   * with the direct (off-heap) implementation — confirm against the sibling override.
   */
  @Override //the returned array is not always used
  double[] growCombinedBuffer(final int currentSpace, final int spaceNeeded) {
    combinedBuffer_ = Arrays.copyOf(combinedBuffer_, spaceNeeded);
    return combinedBuffer_;
  }
  /**
   * This is only used for on-heap sketches, and grows the Base Buffer by factors of 2 until it
   * reaches the maximum size of 2 * k. It is only called when there are no levels above the
   * Base Buffer.
   */
  //important: n has not been incremented yet
  private void growBaseBuffer() {
    final int oldSize = combinedBuffer_.length;
    assert oldSize < (2 * k_); //otherwise we would already be at the maximum base buffer size
    final double[] baseBuffer = combinedBuffer_;
    //double the size, but never below 2 * MIN_K and never above 2 * k
    final int newSize = 2 * Math.max(Math.min(k_, oldSize), MIN_K);
    combinedBuffer_ = Arrays.copyOf(baseBuffer, newSize);
  }
static void checkPreLongsFlagsSerVer(final int flags, final int serVer, final int preLongs) {
final boolean empty = (flags & EMPTY_FLAG_MASK) > 0;
final boolean compact = (flags & COMPACT_FLAG_MASK) > 0;
final int sw = (compact ? 1 : 0) + (2 * (empty ? 1 : 0)) + (4 * (serVer & 0xF))
+ (32 * (preLongs & 0x3F));
boolean valid = true;
switch (sw) { //These are the valid cases.
case 38 : break; //!compact, empty, serVer = 1, preLongs = 1; always stored as not compact
case 164 : break; //!compact, !empty, serVer = 1, preLongs = 5; always stored as not compact
case 42 : break; //!compact, empty, serVer = 2, preLongs = 1; always stored as compact
case 72 : break; //!compact, !empty, serVer = 2, preLongs = 2; always stored as compact
case 47 : break; // compact, empty, serVer = 3, preLongs = 1;
case 46 : break; //!compact, empty, serVer = 3, preLongs = 1;
case 79 : break; // compact, empty, serVer = 3, preLongs = 2;
case 78 : break; //!compact, empty, serVer = 3, preLongs = 2;
case 77 : break; // compact, !empty, serVer = 3, preLongs = 2;
case 76 : break; //!compact, !empty, serVer = 3, preLongs = 2;
default : //all other cases are invalid
valid = false;
}
if (!valid) {
throw new SketchesArgumentException("Possible corruption. Inconsistent state: "
+ "PreambleLongs = " + preLongs + ", empty = " + empty + ", SerVer = " + serVer
+ ", Compact = " + compact);
}
}
/**
* Checks the validity of the heap memory capacity assuming n, k and the compact state.
* @param k the given k
* @param n the given n
* @param compact true if memory is in compact form
* @param serVer serialization version of the source
* @param memCapBytes the current memory capacity in bytes
*/
static void checkHeapMemCapacity(final int k, final long n, final boolean compact,
final int serVer, final long memCapBytes) {
final int metaPre = Family.QUANTILES.getMaxPreLongs() + ((serVer == 1) ? 3 : 2);
final int retainedItems = computeRetainedItems(k, n);
final int reqBufBytes;
if (compact) {
reqBufBytes = (metaPre + retainedItems) << 3;
} else { //not compact
final int totLevels = computeNumLevelsNeeded(k, n);
reqBufBytes = (totLevels == 0)
? (metaPre + retainedItems) << 3
: (metaPre + ((2 + totLevels) * k)) << 3;
}
if (memCapBytes < reqBufBytes) {
throw new SketchesArgumentException("Possible corruption: Memory capacity too small: "
+ memCapBytes + " < " + reqBufBytes);
}
}
}
| 2,811 |
0 | Create_ds/datasketches-java/src/main/java/org/apache/datasketches | Create_ds/datasketches-java/src/main/java/org/apache/datasketches/quantiles/DoublesSketchSortedViewIterator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.quantiles;
import static org.apache.datasketches.quantilescommon.QuantileSearchCriteria.INCLUSIVE;
import org.apache.datasketches.quantilescommon.DoublesSortedViewIterator;
import org.apache.datasketches.quantilescommon.QuantileSearchCriteria;
/**
* Iterator over DoublesSketchSortedView.
*/
public final class DoublesSketchSortedViewIterator implements DoublesSortedViewIterator {
  private final double[] quantiles;   //sorted quantiles
  private final long[] cumWeights;    //cumulative natural weights, parallel to quantiles
  private final long totalN;          //total stream length N
  private int index;                  //current position; -1 before the first next()

  /**
   * Constructs the iterator over the given parallel arrays.
   * @param quantiles sorted array of quantiles
   * @param cumWeights monotonically increasing cumulative weights; last entry equals N
   */
  DoublesSketchSortedViewIterator(final double[] quantiles, final long[] cumWeights) {
    this.quantiles = quantiles;
    this.cumWeights = cumWeights;
    final int len = cumWeights.length;
    this.totalN = (len == 0) ? 0 : cumWeights[len - 1];
    this.index = -1;
  }

  @Override
  public long getCumulativeWeight(final QuantileSearchCriteria searchCrit) {
    if (searchCrit != INCLUSIVE) {
      //EXCLUSIVE: the weight accumulated strictly before the current quantile
      return (index == 0) ? 0 : cumWeights[index - 1];
    }
    return cumWeights[index];
  }

  @Override
  public long getN() {
    return totalN;
  }

  @Override
  public double getNormalizedRank(final QuantileSearchCriteria searchCrit) {
    return (double) getCumulativeWeight(searchCrit) / totalN;
  }

  @Override
  public double getQuantile() {
    return quantiles[index];
  }

  @Override
  public long getWeight() {
    //individual weight = difference of adjacent cumulative weights
    return (index == 0) ? cumWeights[0] : cumWeights[index] - cumWeights[index - 1];
  }

  @Override
  public boolean next() {
    return ++index < quantiles.length;
  }
}
| 2,812 |
0 | Create_ds/datasketches-java/src/main/java/org/apache/datasketches | Create_ds/datasketches-java/src/main/java/org/apache/datasketches/quantiles/DoublesUpdateImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.quantiles;
/**
* The doubles update algorithms for quantiles.
*
* @author Lee Rhodes
* @author Kevin Lang
*/
final class DoublesUpdateImpl {

  private DoublesUpdateImpl() {} //utility class: no instances

  /**
   * Returns item capacity needed based on n and k, which may or may not be larger than
   * current space allocated.
   * @param k current k
   * @param newN the new n
   * @return item capacity based on n and k. It may or may not be different.
   */
  //important: newN might not equal n_
  // This only increases the size and does not touch or move any data.
  static int getRequiredItemCapacity(final int k, final long newN) {
    final int numLevelsNeeded = ClassicUtil.computeNumLevelsNeeded(k, newN);
    if (numLevelsNeeded == 0) {
      // don't need any levels yet, and might have small base buffer; this can happen during a merge
      return 2 * k;
    }
    // from here on we need a full-size base buffer and at least one level
    assert newN >= (2L * k);
    assert numLevelsNeeded > 0;
    final int spaceNeeded = (2 + numLevelsNeeded) * k;
    return spaceNeeded;
  }

  /**
   * This is used to propagate-carry (ripple-carry) an update that will cause the full, sorted
   * base buffer to empty into the levels hierarchy, thus creating a ripple effect up
   * through the higher levels. It is also used during merge operations with the only difference
   * is the base buffer(s) could have valid data and is less than full.
   * This distinction is determined by the <i>doUpdateVersion</i> flag.
   *
   * <p>Prior to this method being called, any extra space for the combined buffer required
   * by either the update or merge operations must already be allocated.</p>
   *
   * <p><b>Update Version:</b> The base buffer is initially full, and after it has been sorted and
   * zipped, will be used as a size2KBuf scratch buffer for the remaining recursive carries.
   * The lowest non-valid level, determined by the bit-pattern, will used internally as a
   * size K scratch buffer and the ultimate target.
   * Thus no additional buffer storage is required outside the combined buffer.</p>
   *
   * <p><b>Merge Version:</b> During merging, each level from the source sketch that must be
   * merged is entered into this method and is assigned to the optional source size K buffer
   * (<i>optSrcKBuf</i>). Because the base buffer may have data, a separate size2K
   * scratch buffer must be provided. The next-lowest. non-valid level, determined by the
   * bit-pattern, will used as a sizeKBuf scratch buffer.</p>
   *
   * <p><b>Downsample Merge Version:</b> This is a variant of the above Merge Version, except at
   * each level the downsampling is performed and the target level is computed for the target merge.
   * In this case the optSrcKBuf is the result of the downsample process and needs to be allocated
   * for that purpose.
   *
   * <p><b>Recursive carry:</b> This starts with a given sorted, size 2K buffer, which is zipped
   * into a size K buffer. If the next level is not valid, the size K buffer is already in position,
   * the bit pattern is updated and returned.</p>
   *
   * <p>If the next level is valid, it is merged with the size K buffer into the size 2K buffer.
   * Continue the recursion until a non-valid level becomes filled by the size K buffer,
   * the bit pattern is updated and returned.</p>
   *
   * @param startingLevel 0-based starting level
   * @param optSrcKBuf optional, size k source, read only buffer
   * @param size2KBuf size 2k scratch buffer
   * @param doUpdateVersion true if update version
   * @param k the target k
   * @param tgtSketchBuf the given DoublesSketchAccessor
   * @param bitPattern the current bitPattern, prior to this call
   * @return The updated bit pattern. The updated combined buffer is output as a side effect.
   */
  static long inPlacePropagateCarry(
      final int startingLevel,
      final DoublesBufferAccessor optSrcKBuf,
      final DoublesBufferAccessor size2KBuf,
      final boolean doUpdateVersion,
      final int k,
      final DoublesSketchAccessor tgtSketchBuf,
      final long bitPattern) {
    //the first zero bit at or above startingLevel is where the carry finally lands
    final int endingLevel = ClassicUtil.lowestZeroBitStartingAt(bitPattern, startingLevel);

    tgtSketchBuf.setLevel(endingLevel);
    if (doUpdateVersion) { // update version of computation
      // its is okay for optSrcKBuf to be null in this case
      zipSize2KBuffer(size2KBuf, tgtSketchBuf);
    } else { // mergeInto version of computation
      assert (optSrcKBuf != null);
      tgtSketchBuf.putArray(optSrcKBuf.getArray(0, k), 0, 0, k);
    }

    //ripple the carry through each valid level below endingLevel
    for (int lvl = startingLevel; lvl < endingLevel; lvl++) {
      assert (bitPattern & (1L << lvl)) > 0; // internal consistency check
      final DoublesSketchAccessor currLevelBuf = tgtSketchBuf.copyAndSetLevel(lvl);
      mergeTwoSizeKBuffers(
          currLevelBuf, // target level: lvl
          tgtSketchBuf, // target level: endingLevel
          size2KBuf);
      zipSize2KBuffer(size2KBuf, tgtSketchBuf);
    } // end of loop over lower levels

    // update bit pattern with binary-arithmetic ripple carry
    return bitPattern + (1L << startingLevel);
  }

  //Keeps every other item of the sorted 2k input (starting from a random offset),
  // writing k survivors into bufOut. This is the probabilistic compaction step.
  private static void zipSize2KBuffer(
      final DoublesBufferAccessor bufIn,
      final DoublesBufferAccessor bufOut) {
    final int randomOffset = DoublesSketch.rand.nextBoolean() ? 1 : 0;
    final int limOut = bufOut.numItems();
    for (int idxIn = randomOffset, idxOut = 0; idxOut < limOut; idxIn += 2, idxOut++) {
      bufOut.set(idxOut, bufIn.get(idxIn));
    }
  }

  //Classic two-way merge of two sorted size-k buffers into a sorted size-2k buffer.
  private static void mergeTwoSizeKBuffers(
      final DoublesBufferAccessor src1,
      final DoublesBufferAccessor src2,
      final DoublesBufferAccessor dst) {
    assert src1.numItems() == src2.numItems();

    final int k = src1.numItems();
    int i1 = 0;
    int i2 = 0;
    int iDst = 0;
    while ((i1 < k) && (i2 < k)) {
      if (src2.get(i2) < src1.get(i1)) {
        dst.set(iDst++, src2.get(i2++));
      } else {
        dst.set(iDst++, src1.get(i1++));
      }
    }

    //copy the remainder of whichever source is not yet exhausted
    if (i1 < k) {
      final int numItems = k - i1;
      dst.putArray(src1.getArray(i1, numItems), 0, iDst, numItems);
    } else {
      final int numItems = k - i2;
      dst.putArray(src2.getArray(i2, numItems), 0, iDst, numItems);
    }
  }
}
| 2,813 |
0 | Create_ds/datasketches-java/src/main/java/org/apache/datasketches | Create_ds/datasketches-java/src/main/java/org/apache/datasketches/quantiles/CompactDoublesSketch.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.quantiles;
import org.apache.datasketches.common.SketchesStateException;
import org.apache.datasketches.memory.Memory;
/**
* Compact sketches are inherently <i>read ony</i>.
* @author Jon Malkin
*/
public abstract class CompactDoublesSketch extends DoublesSketch {
  CompactDoublesSketch(final int k) {
    super(k);
  }

  /**
   * Heapifies the given Memory image of a compact DoublesSketch.
   * @param srcMem the source Memory image
   * @return a heap-based CompactDoublesSketch
   */
  public static CompactDoublesSketch heapify(final Memory srcMem) {
    return HeapCompactDoublesSketch.heapifyInstance(srcMem);
  }

  @Override
  boolean isCompact() {
    return true;
  }

  /**
   * {@inheritDoc}
   * <p>Always true: compact sketches are inherently read only — {@link #reset()} and
   * {@link #update(double)} both throw.</p>
   */
  @Override
  public boolean isReadOnly() {
    return true; //was false, contradicting the class contract and the exceptions below
  }

  @Override
  public void reset() {
    throw new SketchesStateException("Cannot reset a compact sketch, which is read-only.");
  }

  @Override
  public void update(final double quantile) {
    throw new SketchesStateException("Cannot update a compact sketch, which is read-only.");
  }
}
| 2,814 |
0 | Create_ds/datasketches-java/src/main/java/org/apache/datasketches | Create_ds/datasketches-java/src/main/java/org/apache/datasketches/quantiles/DoublesBufferAccessor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.quantiles;
/**
* @author Jon Malkin
*/
/**
 * Read/write access abstraction over a contiguous buffer of doubles,
 * used by the quantiles update and merge algorithms.
 *
 * @author Jon Malkin
 */
abstract class DoublesBufferAccessor {
  /** @return the item at the given index */
  abstract double get(final int index);

  /** Sets the item at the given index and returns the previous value at that index. */
  abstract double set(final int index, final double quantile);

  /** @return the number of items accessible through this accessor */
  abstract int numItems();

  /** @return a copy of numItems items starting at fromIdx */
  abstract double[] getArray(int fromIdx, int numItems);

  /** Copies numItems items from srcArray[srcIndex] into this buffer at dstIndex. */
  abstract void putArray(double[] srcArray, int srcIndex,
                         int dstIndex, int numItems);
}
| 2,815 |
0 | Create_ds/datasketches-java/src/main/java/org/apache/datasketches | Create_ds/datasketches-java/src/main/java/org/apache/datasketches/quantiles/DoublesByteArrayImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.quantiles;
import static org.apache.datasketches.quantiles.ClassicUtil.DOUBLES_SER_VER;
import static org.apache.datasketches.quantiles.ClassicUtil.computeBaseBufferItems;
import static org.apache.datasketches.quantiles.ClassicUtil.computeTotalLevels;
import static org.apache.datasketches.quantiles.PreambleUtil.COMPACT_FLAG_MASK;
import static org.apache.datasketches.quantiles.PreambleUtil.EMPTY_FLAG_MASK;
import static org.apache.datasketches.quantiles.PreambleUtil.ORDERED_FLAG_MASK;
import static org.apache.datasketches.quantiles.PreambleUtil.READ_ONLY_FLAG_MASK;
import static org.apache.datasketches.quantiles.PreambleUtil.insertFamilyID;
import static org.apache.datasketches.quantiles.PreambleUtil.insertFlags;
import static org.apache.datasketches.quantiles.PreambleUtil.insertK;
import static org.apache.datasketches.quantiles.PreambleUtil.insertMaxDouble;
import static org.apache.datasketches.quantiles.PreambleUtil.insertMinDouble;
import static org.apache.datasketches.quantiles.PreambleUtil.insertN;
import static org.apache.datasketches.quantiles.PreambleUtil.insertPreLongs;
import static org.apache.datasketches.quantiles.PreambleUtil.insertSerVer;
import java.util.Arrays;
import org.apache.datasketches.common.Family;
import org.apache.datasketches.memory.WritableMemory;
/**
* The doubles to byte array algorithms.
*
* @author Lee Rhodes
* @author Jon Malkin
*/
final class DoublesByteArrayImpl {

  private DoublesByteArrayImpl() {} //utility class: no instances

  /**
   * Serializes the given sketch to a byte array in the requested form.
   * @param sketch the source sketch
   * @param ordered true if the base buffer should be sorted in the output
   * @param compact true for the compact serialized form
   * @return the serialized byte array
   */
  static byte[] toByteArray(final DoublesSketch sketch, final boolean ordered, final boolean compact) {
    final boolean empty = sketch.isEmpty();

    //create the flags byte
    final int flags = (empty ? EMPTY_FLAG_MASK : 0)
        | (ordered ? ORDERED_FLAG_MASK : 0)
        | (compact ? (COMPACT_FLAG_MASK | READ_ONLY_FLAG_MASK) : 0);

    if (empty && !sketch.hasMemory()) { //empty and has no backing Memory: preamble only
      final byte[] outByteArr = new byte[Long.BYTES];
      final WritableMemory memOut = WritableMemory.writableWrap(outByteArr);
      final int preLongs = 1;
      insertPre0(memOut, preLongs, flags, sketch.getK());
      return outByteArr;
    }
    //not empty || direct; flags passed for convenience
    return convertToByteArray(sketch, flags, ordered, compact);
  }

  /**
   * Returns a byte array, including preamble, min, max and data extracted from the sketch.
   * @param sketch the given DoublesSketch
   * @param flags the Flags field
   * @param ordered true if the desired form of the resulting array has the base buffer sorted.
   * @param compact true if the desired form of the resulting array is in compact form.
   * @return a byte array, including preamble, min, max and data extracted from the Combined Buffer.
   */
  private static byte[] convertToByteArray(final DoublesSketch sketch, final int flags,
                                           final boolean ordered, final boolean compact) {
    final int preLongs = 2;
    final int extra = 2; // extra space for min and max quantiles
    final int prePlusExtraBytes = (preLongs + extra) << 3;
    final int k = sketch.getK();
    final long n = sketch.getN();

    // If not-compact, have accessor always report full levels. Then use level size to determine
    // whether to copy data out.
    final DoublesSketchAccessor dsa = DoublesSketchAccessor.wrap(sketch, !compact);

    final int outBytes = (compact ? sketch.getCurrentCompactSerializedSizeBytes()
        : sketch.getCurrentUpdatableSerializedSizeBytes());

    final byte[] outByteArr = new byte[outBytes];
    final WritableMemory memOut = WritableMemory.writableWrap(outByteArr);

    //insert preamble-0, N, min, max
    insertPre0(memOut, preLongs, flags, k);
    if (sketch.isEmpty()) { return outByteArr; }
    insertN(memOut, n);
    //NOTE(review): sketch is known non-empty here, so these ternaries always take the else branch
    insertMinDouble(memOut, sketch.isEmpty() ? Double.NaN : sketch.getMinItem());
    insertMaxDouble(memOut, sketch.isEmpty() ? Double.NaN : sketch.getMaxItem());

    long memOffsetBytes = prePlusExtraBytes;

    // might need to sort base buffer but don't want to change input sketch
    final int bbCnt = computeBaseBufferItems(k, n);
    if (bbCnt > 0) { //Base buffer items only
      final double[] bbItemsArr = dsa.getArray(0, bbCnt);
      if (ordered) { Arrays.sort(bbItemsArr); }
      memOut.putDoubleArray(memOffsetBytes, bbItemsArr, 0, bbCnt);
    }
    // If n < 2k, totalLevels == 0 so ok to overshoot the offset update
    memOffsetBytes += (compact ? bbCnt : 2 * k) << 3;

    // If serializing from a compact sketch to a non-compact form, we may end up copying data for a
    // higher level one or more times into an unused level. A bit wasteful, but not incorrect.
    final int totalLevels = computeTotalLevels(sketch.getBitPattern());
    for (int lvl = 0; lvl < totalLevels; ++lvl) {
      dsa.setLevel(lvl);
      if (dsa.numItems() > 0) {
        assert dsa.numItems() == k;
        memOut.putDoubleArray(memOffsetBytes, dsa.getArray(0, k), 0, k);
        memOffsetBytes += (k << 3);
      }
    }

    return outByteArr;
  }

  //Writes the first preamble long: preLongs, serVer, family, flags and k.
  private static void insertPre0(final WritableMemory wmem,
                                 final int preLongs, final int flags, final int k) {
    insertPreLongs(wmem, preLongs);
    insertSerVer(wmem, DOUBLES_SER_VER);
    insertFamilyID(wmem, Family.QUANTILES.getID());
    insertFlags(wmem, flags);
    insertK(wmem, k);
  }
}
| 2,816 |
0 | Create_ds/datasketches-java/src/main/java/org/apache/datasketches | Create_ds/datasketches-java/src/main/java/org/apache/datasketches/quantiles/ItemsSketchSortedView.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.quantiles;
import static org.apache.datasketches.quantilescommon.QuantileSearchCriteria.INCLUSIVE;
import java.lang.reflect.Array;
import java.util.Arrays;
import java.util.Comparator;
import org.apache.datasketches.common.SketchesStateException;
import org.apache.datasketches.quantilescommon.GenericInequalitySearch;
import org.apache.datasketches.quantilescommon.GenericInequalitySearch.Inequality;
import org.apache.datasketches.quantilescommon.GenericSortedView;
import org.apache.datasketches.quantilescommon.GenericSortedViewIterator;
import org.apache.datasketches.quantilescommon.InequalitySearch;
import org.apache.datasketches.quantilescommon.QuantileSearchCriteria;
import org.apache.datasketches.quantilescommon.QuantilesAPI;
import org.apache.datasketches.quantilescommon.QuantilesUtil;
/**
* The SortedView of the Classic Quantiles ItemsSketch.
* @param <T> The sketch data type
* @author Kevin Lang
* @author Alexander Saydakov
*/
public class ItemsSketchSortedView<T> implements GenericSortedView<T> {
  private final T[] quantiles; //sorted quantiles
  private final long[] cumWeights; //comes in as individual weights, converted to cumulative natural weights
  private final long totalN; //total stream length N
  private final Comparator<? super T> comparator;

  /**
   * Construct from elements for testing.
   * @param quantiles sorted array of quantiles
   * @param cumWeights sorted, monotonically increasing cumulative weights.
   * @param totalN the total number of items presented to the sketch.
   * @param comparator comparator for type T
   */
  ItemsSketchSortedView(
      final T[] quantiles,
      final long[] cumWeights,
      final long totalN,
      final Comparator<T> comparator) {
    this.quantiles = quantiles;
    this.cumWeights = cumWeights;
    this.totalN = totalN;
    this.comparator = comparator;
  }

  /**
   * Constructs this Sorted View given the sketch
   * @param sketch the given Classic Quantiles ItemsSketch
   */
  @SuppressWarnings("unchecked")
  ItemsSketchSortedView(final ItemsSketch<T> sketch) {
    this.totalN = sketch.getN();
    final int k = sketch.getK();
    final int numQuantiles = sketch.getNumRetained();
    quantiles = (T[]) Array.newInstance(sketch.clazz, numQuantiles);
    cumWeights = new long[numQuantiles];
    comparator = sketch.getComparator();

    final Object[] combinedBuffer = sketch.getCombinedBuffer();
    final int baseBufferCount = sketch.getBaseBufferCount();

    // Populate from ItemsSketch:
    // copy over the "levels" and then the base buffer, all with appropriate weights
    populateFromItemsSketch(k, totalN, sketch.getBitPattern(), (T[]) combinedBuffer, baseBufferCount,
        numQuantiles, quantiles, cumWeights, sketch.getComparator());

    // Sort the first "numSamples" slots of the two arrays in tandem,
    // taking advantage of the already sorted blocks of length k
    ItemsMergeImpl.blockyTandemMergeSort(quantiles, cumWeights, numQuantiles, k, sketch.getComparator());
    if (convertToCumulative(cumWeights) != totalN) {
      throw new SketchesStateException("Sorted View is misconfigured. TotalN does not match cumWeights.");
    }
  }

  @Override //implemented here because it needs the comparator
  public double[] getCDF(final T[] splitPoints, final QuantileSearchCriteria searchCrit) {
    if (isEmpty()) { throw new IllegalArgumentException(QuantilesAPI.EMPTY_MSG); }
    GenericSortedView.validateItems(splitPoints, comparator);
    final int len = splitPoints.length + 1;
    final double[] buckets = new double[len];
    for (int i = 0; i < len - 1; i++) {
      buckets[i] = getRank(splitPoints[i], searchCrit);
    }
    buckets[len - 1] = 1.0; //the final bucket always reaches rank 1.0
    return buckets;
  }

  @Override
  public long[] getCumulativeWeights() {
    return cumWeights.clone(); //defensive copy
  }

  @Override //implemented here because it needs the comparator
  public double[] getPMF(final T[] splitPoints, final QuantileSearchCriteria searchCrit) {
    if (isEmpty()) { throw new IllegalArgumentException(QuantilesAPI.EMPTY_MSG); }
    GenericSortedView.validateItems(splitPoints, comparator);
    //PMF is the discrete derivative of the CDF
    final double[] buckets = getCDF(splitPoints, searchCrit);
    final int len = buckets.length;
    for (int i = len; i-- > 1; ) {
      buckets[i] -= buckets[i - 1];
    }
    return buckets;
  }

  @Override
  public T getQuantile(final double rank, final QuantileSearchCriteria searchCrit) {
    if (isEmpty()) { throw new IllegalArgumentException(QuantilesAPI.EMPTY_MSG); }
    QuantilesUtil.checkNormalizedRankBounds(rank);
    final int len = cumWeights.length;
    //map the normalized rank to a natural rank in [0, N]; ceil for INCLUSIVE, floor for EXCLUSIVE
    final long naturalRank = (searchCrit == INCLUSIVE)
        ? (long)Math.ceil(rank * totalN) : (long)Math.floor(rank * totalN);
    final InequalitySearch crit = (searchCrit == INCLUSIVE) ? InequalitySearch.GE : InequalitySearch.GT;
    final int index = InequalitySearch.find(cumWeights, 0, len - 1, naturalRank, crit);
    if (index == -1) {
      return quantiles[quantiles.length - 1]; //EXCLUSIVE (GT) case: normRank == 1.0;
    }
    return quantiles[index];
  }

  @Override
  public T[] getQuantiles() {
    return quantiles.clone(); //defensive (shallow) copy
  }

  @Override
  public double getRank(final T quantile, final QuantileSearchCriteria searchCrit) {
    if (isEmpty()) { throw new IllegalArgumentException(QuantilesAPI.EMPTY_MSG); }
    final int len = quantiles.length;
    final Inequality crit = (searchCrit == INCLUSIVE) ? Inequality.LE : Inequality.LT;
    final int index = GenericInequalitySearch.find(quantiles,  0, len - 1, quantile, crit, comparator);
    if (index == -1) {
      return 0; //EXCLUSIVE (LT) case: quantile <= minQuantile; INCLUSIVE (LE) case: quantile < minQuantile
    }
    return (double)cumWeights[index] / totalN;
  }

  @Override
  public boolean isEmpty() {
    return totalN == 0;
  }

  @Override
  public ItemsSketchSortedViewIterator<T> iterator() {
    return new ItemsSketchSortedViewIterator<>(quantiles, cumWeights);
  }

  //restricted methods

  /**
   * Populate the arrays and registers from an ItemsSketch
   * @param <T> the data type
   * @param k K parameter of sketch
   * @param n The current size of the stream
   * @param bitPattern the bit pattern for valid log levels
   * @param combinedBuffer the combined buffer reference
   * @param baseBufferCount the count of the base buffer
   * @param numQuantiles number of retained quantiles in the sketch
   * @param quantilesArr the consolidated array of all quantiles from the sketch
   * @param weightsArr the weights for each item from the sketch
   * @param comparator the given comparator for data type T
   */
  private final static <T> void populateFromItemsSketch(
      final int k, final long n, final long bitPattern, final T[] combinedBuffer,
      final int baseBufferCount, final int numQuantiles, final T[] quantilesArr, final long[] weightsArr,
      final Comparator<? super T> comparator) {
    long weight = 1;
    int nxt = 0;
    long bits = bitPattern;
    assert bits == (n / (2L * k)); // internal consistency check
    //each valid level lvl carries k items of weight 2^(lvl+1)
    for (int lvl = 0; bits != 0L; lvl++, bits >>>= 1) {
      weight *= 2;
      if ((bits & 1L) > 0L) {
        final int offset = (2 + lvl) * k;
        for (int i = 0; i < k; i++) {
          quantilesArr[nxt] = combinedBuffer[i + offset];
          weightsArr[nxt] = weight;
          nxt++;
        }
      }
    }

    weight = 1; //NOT a mistake! We just copied the highest level; now we need to copy the base buffer
    final int startOfBaseBufferBlock = nxt;

    // Copy BaseBuffer over, along with weight = 1
    for (int i = 0; i < baseBufferCount; i++) {
      quantilesArr[nxt] = combinedBuffer[i];
      weightsArr[nxt] = weight;
      nxt++;
    }
    assert nxt == numQuantiles;

    // Must sort the items that came from the base buffer.
    // Don't need to sort the corresponding weights because they are all the same.
    Arrays.sort(quantilesArr, startOfBaseBufferBlock, numQuantiles, comparator);
  }

  /**
   * Convert the individual weights into cumulative weights.
   * An array of {1,1,1,1} becomes {1,2,3,4}
   * @param array of actual weights from the sketch, none of the weights may be zero
   * @return total weight
   */
  private static long convertToCumulative(final long[] array) {
    long subtotal = 0;
    for (int i = 0; i < array.length; i++) {
      final long newSubtotal = subtotal + array[i];
      subtotal = array[i] = newSubtotal; //in-place running sum
    }
    return subtotal;
  }

  /**
   * Iterator over ItemsSketchSortedView.
   * @param <T> type of quantile (item)
   */
  public static final class ItemsSketchSortedViewIterator<T> extends GenericSortedViewIterator<T> {

    ItemsSketchSortedViewIterator(final T[] quantiles, final long[] cumWeights) {
      super(quantiles, cumWeights);
    }
  }
}
| 2,817 |
0 | Create_ds/datasketches-java/src/main/java/org/apache/datasketches | Create_ds/datasketches-java/src/main/java/org/apache/datasketches/quantiles/DoublesUnion.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.quantiles;
import org.apache.datasketches.memory.Memory;
import org.apache.datasketches.memory.WritableMemory;
/**
* The API for Union operations for quantiles DoublesSketches
*
* @author Lee Rhodes
*/
public abstract class DoublesUnion {

  /**
   * Returns a new UnionBuilder
   * @return a new UnionBuilder
   */
  public static DoublesUnionBuilder builder() {
    return new DoublesUnionBuilder();
  }

  /**
   * Returns a Heap Union object that has been initialized with the data from the given sketch.
   * @param sketch A DoublesSketch to be used as a source of data only and will not be modified.
   * @return a DoublesUnion object
   */
  public static DoublesUnion heapify(final DoublesSketch sketch) {
    return DoublesUnionImpl.heapifyInstance(sketch);
  }

  /**
   * Returns a Heap Union object that has been initialized with the data from the given memory
   * image of a sketch.
   *
   * @param srcMem A memory image of a DoublesSketch to be used as a source of data,
   * but will not be modified.
   * @return a Union object
   */
  public static DoublesUnion heapify(final Memory srcMem) {
    return DoublesUnionImpl.heapifyInstance(srcMem);
  }

  /**
   * Returns a read-only Union object that wraps off-heap data of the given memory image of
   * a sketch. The data structures of the Union remain off-heap.
   *
   * @param mem A memory region holding the sketch image to be used as the data structure
   * for the union. Because this union is read-only, the memory will not be modified.
   * @return a Union object
   */
  public static DoublesUnion wrap(final Memory mem) {
    return DoublesUnionImplR.wrapInstance(mem);
  }

  /**
   * Returns an updatable Union object that wraps off-heap data of the given memory image of
   * a sketch. The data structures of the Union remain off-heap.
   *
   * @param mem A memory region to be used as the data structure for the sketch
   * and will be modified.
   * @return a Union object
   */
  public static DoublesUnion wrap(final WritableMemory mem) {
    return DoublesUnionImpl.wrapInstance(mem);
  }

  /**
   * Returns true if this union's data structure is backed by Memory or WritableMemory.
   * @return true if this union's data structure is backed by Memory or WritableMemory.
   */
  public abstract boolean hasMemory();

  /**
   * Returns true if this union is off-heap (direct)
   * @return true if this union is off-heap (direct)
   */
  public abstract boolean isDirect();

  /**
   * Returns true if this union is empty
   * @return true if this union is empty
   */
  public abstract boolean isEmpty();

  /**
   * Returns the configured <i>maxK</i> of this Union.
   * @return the configured <i>maxK</i> of this Union.
   */
  public abstract int getMaxK();

  /**
   * Returns the effective <i>k</i> of this Union.
   * @return the effective <i>k</i> of this Union.
   */
  public abstract int getEffectiveK();

  /**
   * Iterative union operation, which means this method can be repeatedly called.
   * Merges the given sketch into this union object.
   * The given sketch is not modified.
   * It is required that the ratio of the two K's be a power of 2.
   * This is easily satisfied if each of the K's are already a power of 2.
   * If the given sketch is null or empty it is ignored.
   *
   * <p>The result of the union operation can be obtained at any time from
   * {@link #getResult()}.
   *
   * @param sketchIn the sketch to be merged into this one.
   */
  public abstract void union(DoublesSketch sketchIn);

  /**
   * Iterative union operation, which means this method can be repeatedly called.
   * Merges the given Memory image of a DoublesSketch into this union object.
   * The given Memory object is not modified and a link to it is not retained.
   * It is required that the ratio of the two K's be a power of 2.
   * This is easily satisfied if each of the K's are already a power of 2.
   * If the given sketch is null or empty it is ignored.
   *
   * <p>The result of the union operation can be obtained at any time from
   * {@link #getResult()}.
   *
   * @param mem Memory image of sketch to be merged
   */
  public abstract void union(Memory mem);

  /**
   * Update this union with the given double (or float) data Item.
   *
   * @param dataItem The given double datum.
   */
  public abstract void update(double dataItem);

  /**
   * Gets the result of this Union as an UpdateDoublesSketch, which enables further update
   * operations on the resulting sketch. The Union state has not been changed, which allows
   * further union operations.
   *
   * @return the result of this Union operation
   */
  public abstract UpdateDoublesSketch getResult();

  /**
   * Places the result of this Union into the provided memory as an UpdateDoublesSketch,
   * which enables further update operations on the resulting sketch. The Union state has not
   * been changed, which allows further union operations.
   *
   * @param dstMem the destination memory for the result
   * @return the result of this Union operation
   */
  public abstract UpdateDoublesSketch getResult(WritableMemory dstMem);

  /**
   * Gets the result of this Union as an UpdateDoublesSketch, which enables further update
   * operations on the resulting sketch. The Union is reset to the virgin state.
   *
   * @return the result of this Union operation and reset.
   */
  public abstract UpdateDoublesSketch getResultAndReset();

  /**
   * Resets this Union to a virgin state.
   */
  public abstract void reset();

  /**
   * Serialize this union to a byte array. Result is an UpdateDoublesSketch, serialized in an
   * unordered, non-compact form. The resulting byte[] can be heapified or wrapped as either a
   * sketch or a union.
   *
   * @return byte array of this union
   */
  public abstract byte[] toByteArray();

  /**
   * Returns summary information about the backing sketch.
   * @return summary information about the backing sketch.
   */
  @Override
  public abstract String toString();

  /**
   * Returns summary information about the backing sketch. Used for debugging.
   * @param sketchSummary if true includes sketch summary
   * @param dataDetail if true includes data detail
   * @return summary information about the sketch.
   */
  public abstract String toString(boolean sketchSummary, boolean dataDetail);

  /**
   * Returns true if the backing resource of <i>this</i> is identical with the backing resource
   * of <i>that</i>. The capacities must be the same. If <i>this</i> is a region,
   * the region offset must also be the same.
   * @param that A different non-null object
   * @return true if the backing resource of <i>this</i> is the same as the backing resource
   * of <i>that</i>.
   */
  public abstract boolean isSameResource(final Memory that);
}
| 2,818 |
0 | Create_ds/datasketches-java/src/main/java/org/apache/datasketches | Create_ds/datasketches-java/src/main/java/org/apache/datasketches/quantiles/ItemsUnion.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.quantiles;
import static org.apache.datasketches.common.Util.LS;
import java.util.Comparator;
import java.util.Objects;
import org.apache.datasketches.common.ArrayOfItemsSerDe;
import org.apache.datasketches.memory.Memory;
/**
* The API for Union operations for generic ItemsSketches
*
* @param <T> type of item
*
* @author Lee Rhodes
* @author Alexander Saydakov
*/
public final class ItemsUnion<T> {
  final int maxK_;
  final Comparator<? super T> comparator_;
  ItemsSketch<T> gadget_;
  Class<T> clazz_;

  /**
   * Constructs this union around the given gadget sketch, which becomes the internal state.
   * @param maxK the configured maximum k of this union
   * @param comparator to compare items; must not be null
   * @param gadget the internal sketch; must not be null
   */
  private ItemsUnion(final int maxK, final Comparator<? super T> comparator, final ItemsSketch<T> gadget) {
    //fixed typo in message: "Gadjet" -> "Gadget"
    Objects.requireNonNull(gadget, "Gadget sketch must not be null.");
    Objects.requireNonNull(comparator, "Comparator must not be null.");
    maxK_ = maxK;
    comparator_ = comparator;
    gadget_ = gadget;
    clazz_ = gadget.clazz;
    gadget_.classicQisSV = null; //invalidate the gadget's cached sorted view
  }

  /**
   * Create an instance of ItemsUnion with the default k
   * @param <T> The sketch data type
   * @param clazz The sketch class type
   * @param comparator to compare items
   * @return an instance of ItemsUnion
   */
  public static <T> ItemsUnion<T> getInstance(final Class<T> clazz, final Comparator<? super T> comparator) {
    final ItemsSketch<T> emptySk = ItemsSketch.getInstance(clazz, comparator);
    return new ItemsUnion<>(PreambleUtil.DEFAULT_K, comparator, emptySk);
  }

  /**
   * Create an instance of ItemsUnion
   * @param clazz The sketch class type
   * @param <T> The sketch data type
   * @param maxK determines the accuracy and size of the union and is a maximum.
   * The effective <i>k</i> can be smaller due to unions with smaller <i>k</i> sketches.
   * It is recommended that <i>maxK</i> be a power of 2 to enable unioning of sketches with a
   * different <i>k</i>.
   * @param comparator to compare items
   * @return an instance of ItemsUnion
   */
  public static <T> ItemsUnion<T> getInstance(final Class<T> clazz, final int maxK,
      final Comparator<? super T> comparator) {
    final ItemsSketch<T> emptySk = ItemsSketch.getInstance(clazz, maxK, comparator);
    return new ItemsUnion<>(maxK, comparator, emptySk);
  }

  /**
   * Heapify the given srcMem into a Union object.
   * A reference to srcMem will not be maintained internally.
   * @param clazz The sketch class type
   * @param srcMem the given srcMem.
   * @param comparator to compare items
   * @param serDe an instance of ArrayOfItemsSerDe
   * @param <T> The sketch data type
   * @return an instance of ItemsUnion
   */
  public static <T> ItemsUnion<T> getInstance(final Class<T> clazz, final Memory srcMem,
      final Comparator<? super T> comparator, final ArrayOfItemsSerDe<T> serDe) {
    final ItemsSketch<T> gadget = ItemsSketch.getInstance(clazz, srcMem, comparator, serDe);
    return new ItemsUnion<>(gadget.getK(), gadget.getComparator(), gadget);
  }

  /**
   * Create an instance of ItemsUnion based on ItemsSketch
   * @param <T> The sketch data type
   * @param sketch the basis of the union
   * @return an instance of ItemsUnion
   */
  public static <T> ItemsUnion<T> getInstance(final ItemsSketch<T> sketch) {
    return new ItemsUnion<>(sketch.getK(), sketch.getComparator(), ItemsSketch.copy(sketch));
  }

  /**
   * Iterative union operation, which means this method can be repeatedly called.
   * Merges the given sketch into this union object.
   * The given sketch is not modified.
   * It is required that the ratio of the two K's be a power of 2.
   * This is easily satisfied if each of the K's are already a power of 2.
   * If the given sketch is null or empty it is ignored.
   *
   * <p>It is required that the results of the union operation, which can be obtained at any time,
   * is obtained from {@link #getResult() }.</p>
   *
   * @param sketchIn the sketch to be merged into this one.
   */
  public void union(final ItemsSketch<T> sketchIn) {
    gadget_ = updateLogic(maxK_, comparator_, gadget_, sketchIn);
  }

  /**
   * Iterative union operation, which means this method can be repeatedly called.
   * Merges the given Memory image of a ItemsSketch into this union object.
   * The given Memory object is not modified and a link to it is not retained.
   * It is required that the ratio of the two K's be a power of 2.
   * This is easily satisfied if each of the K's are already a power of 2.
   * If the given sketch is null or empty it is ignored.
   *
   * <p>It is required that the results of the union operation, which can be obtained at any time,
   * is obtained from {@link #getResult() }.</p>
   * @param srcMem Memory image of sketch to be merged
   * @param serDe an instance of ArrayOfItemsSerDe
   */
  public void union(final Memory srcMem, final ArrayOfItemsSerDe<T> serDe) {
    final ItemsSketch<T> that = ItemsSketch.getInstance(this.clazz_, srcMem, comparator_, serDe);
    gadget_ = updateLogic(maxK_, comparator_, gadget_, that);
  }

  /**
   * Update this union with the given dataItem. Null items are ignored.
   *
   * @param dataItem The given datum.
   */
  public void update(final T dataItem) {
    if (dataItem == null) { return; }
    if (gadget_ == null) { //lazily re-create the gadget after a reset
      gadget_ = ItemsSketch.getInstance(this.clazz_, maxK_, comparator_);
    }
    gadget_.update(dataItem);
  }

  /**
   * Gets the result of this Union operation as a copy of the internal state.
   * This enables further union update operations on this state.
   * @return the result of this Union operation
   */
  public ItemsSketch<T> getResult() {
    if (gadget_ == null) {
      return ItemsSketch.getInstance(this.clazz_, maxK_, comparator_);
    }
    return ItemsSketch.copy(gadget_); //can't have any externally owned handles.
  }

  /**
   * Gets the result of this Union operation (without a copy) and resets this Union to the
   * virgin state.
   *
   * @return the result of this Union operation and reset.
   */
  public ItemsSketch<T> getResultAndReset() {
    if (gadget_ == null) { return null; } //Intentionally return null here for speed.
    final ItemsSketch<T> hqs = gadget_;
    gadget_ = null;
    return hqs;
  }

  /**
   * Resets this Union to a virgin state.
   * Keeps maxK, comparator and clazz
   */
  public void reset() {
    gadget_ = null;
  }

  /**
   * Returns true if this union is empty
   * @return true if this union is empty
   */
  public boolean isEmpty() {
    return (gadget_ == null) || gadget_.isEmpty();
  }

  /**
   * Returns true if this union is direct
   * @return true if this union is direct
   */
  public boolean isDirect() {
    return (gadget_ != null) && gadget_.isDirect();
  }

  /**
   * Returns the configured <i>maxK</i> of this Union.
   * @return the configured <i>maxK</i> of this Union.
   */
  public int getMaxK() {
    return maxK_;
  }

  /**
   * Returns the effective <i>k</i> of this Union.
   * @return the effective <i>k</i> of this Union.
   */
  public int getEffectiveK() {
    return (gadget_ != null) ? gadget_.getK() : maxK_;
  }

  /**
   * Returns summary information about the backing sketch.
   */
  @Override
  public String toString() {
    return toString(true, false);
  }

  /**
   * Returns summary information about the backing sketch. Used for debugging.
   * @param sketchSummary if true includes sketch summary
   * @param dataDetail if true includes data detail
   * @return summary information about the sketch.
   */
  public String toString(final boolean sketchSummary, final boolean dataDetail) {
    final StringBuilder sb = new StringBuilder();
    final String thisSimpleName = this.getClass().getSimpleName();
    final int maxK = this.getMaxK();
    final String kStr = String.format("%,d", maxK);
    //use the statically imported LS consistently (was mixed with ClassicUtil.LS, same value)
    sb.append(LS).append("### Quantiles ").append(thisSimpleName).append(LS);
    sb.append(" maxK : ").append(kStr);
    if (gadget_ == null) {
      sb.append(ItemsSketch.getInstance(this.clazz_, maxK_, comparator_).toString());
      return sb.toString();
    }
    sb.append(gadget_.toString(sketchSummary, dataDetail));
    return sb.toString();
  }

  /**
   * Serialize this union to a byte array. Result is an ItemsSketch, serialized in an
   * unordered, non-compact form. The resulting byte[] can be passed to getInstance for either a
   * sketch or union.
   *
   * @param serDe an instance of ArrayOfItemsSerDe
   * @return byte array of this union
   */
  public byte[] toByteArray(final ArrayOfItemsSerDe<T> serDe) {
    if (gadget_ == null) {
      final ItemsSketch<T> sketch = ItemsSketch.getInstance(this.clazz_, maxK_, comparator_);
      return sketch.toByteArray(serDe);
    }
    return gadget_.toByteArray(serDe);
  }

  //@formatter:off
  /**
   * Central union logic. Dispatches on the null/empty/valid states of the two sketches and
   * returns the sketch that becomes the new gadget. The result may be <i>myQS</i> itself,
   * a copy or downsample of <i>other</i>, a new empty sketch, or null.
   * @param <T> the item type
   * @param myMaxK the union's configured maximum k
   * @param comparator to compare items
   * @param myQS the union's current internal sketch; may be null
   * @param other the incoming sketch; may be null and is never modified
   * @return the sketch to be used as the new internal state
   */
  @SuppressWarnings("unchecked")
  static <T> ItemsSketch<T> updateLogic(final int myMaxK, final Comparator<? super T> comparator,
      final ItemsSketch<T> myQS, final ItemsSketch<T> other) {
    int sw1 = ((myQS  == null) ? 0 : myQS.isEmpty() ? 4 : 8);
    sw1 |=    ((other == null) ? 0 : other.isEmpty() ? 1 : 2);
    int outCase = 0; //0=null, 1=NOOP, 2=copy, 3=merge
    switch (sw1) {
      case 0:  outCase = 0; break; //myQS = null,  other = null ; return null
      case 1:  outCase = 4; break; //myQS = null,  other = empty; create empty-heap(myMaxK)
      case 2:  outCase = 2; break; //myQS = null,  other = valid; stream or downsample to myMaxK
      case 4:  outCase = 1; break; //myQS = empty, other = null ; no-op
      case 5:  outCase = 1; break; //myQS = empty, other = empty; no-op
      case 6:  outCase = 3; break; //myQS = empty, other = valid; merge
      case 8:  outCase = 1; break; //myQS = valid, other = null ; no-op
      case 9:  outCase = 1; break; //myQS = valid, other = empty: no-op
      case 10: outCase = 3; break; //myQS = valid, other = valid; merge
      default: break; //This cannot happen
    }
    ItemsSketch<T> ret = null;
    switch (outCase) {
      case 0: break;
      case 1: ret = myQS; break;
      case 2: { //myQS = null, other = valid; stream or downsample to myMaxK
        assert other != null;
        if (!other.isEstimationMode()) { //other is exact, stream items in
          ret = ItemsSketch.getInstance(other.getSketchType(), myMaxK, comparator);
          final int otherCnt = other.getBaseBufferCount();
          final Object[] combBuf = other.getCombinedBuffer();
          for (int i = 0; i < otherCnt; i++) {
            ret.update((T) combBuf[i]);
          }
        }
        else { //myQS = null, other is est mode
          ret = (myMaxK < other.getK())
              ? other.downSample(myMaxK)
              : ItemsSketch.copy(other); //required because caller has handle
        }
        break;
      }
      case 3: { //myQS = empty/valid, other = valid; merge
        assert other != null;
        assert myQS != null;
        if (!other.isEstimationMode()) { //other is exact, stream items in
          ret = myQS;
          final int otherCnt = other.getBaseBufferCount();
          final Object[] combBuf = other.getCombinedBuffer();
          for (int i = 0; i < otherCnt; i++) {
            ret.update((T) combBuf[i]);
          }
        }
        else { //myQS = empty/valid, other = valid and in est mode
          if (myQS.getK() <= other.getK()) { //I am smaller or equal, thus the target
            ItemsMergeImpl.mergeInto(other, myQS);
            ret = myQS;
          }
          else { //Bigger: myQS.getK() > other.getK(), must reverse roles
            //must copy other as it will become mine and can't have any externally owned handles.
            ret = ItemsSketch.copy(other);
            ItemsMergeImpl.mergeInto(myQS, ret);
          }
        }
        break;
      }
      case 4: {
        assert other != null;
        ret = ItemsSketch.getInstance(other.getSketchType(), Math.min(myMaxK, other.getK()), comparator);
        break;
      }
      default: break; //This cannot happen
    }
    return ret;
  }
  //@formatter:on
}
| 2,819 |
0 | Create_ds/datasketches-java/src/main/java/org/apache/datasketches | Create_ds/datasketches-java/src/main/java/org/apache/datasketches/quantiles/ClassicUtil.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.quantiles;
import static java.lang.Math.abs;
import static java.lang.Math.ceil;
import static java.lang.Math.exp;
import static java.lang.Math.log;
import static java.lang.Math.max;
import static java.lang.Math.min;
import static java.lang.Math.pow;
import static java.lang.Math.round;
import static org.apache.datasketches.common.Util.ceilingIntPowerOf2;
import static org.apache.datasketches.common.Util.isIntPowerOf2;
import static org.apache.datasketches.quantiles.PreambleUtil.COMPACT_FLAG_MASK;
import static org.apache.datasketches.quantiles.PreambleUtil.EMPTY_FLAG_MASK;
import static org.apache.datasketches.quantiles.PreambleUtil.ORDERED_FLAG_MASK;
import static org.apache.datasketches.quantiles.PreambleUtil.READ_ONLY_FLAG_MASK;
import static org.apache.datasketches.quantiles.PreambleUtil.extractFlags;
import org.apache.datasketches.common.Family;
import org.apache.datasketches.common.SketchesArgumentException;
import org.apache.datasketches.memory.Memory;
/**
* Utilities for the classic quantiles sketches and independent of the type.
*
* @author Lee Rhodes
*/
public final class ClassicUtil {
  static final int DOUBLES_SER_VER = 3;
  static final int MAX_PRELONGS = Family.QUANTILES.getMaxPreLongs();
  static final int MIN_K = 2;
  static final int MAX_K = 1 << 15;

  private ClassicUtil() {} //static utility class, never instantiated

  static final String LS = System.getProperty("line.separator");
  static final char TAB = '\t';

  /**
   * Used by Classic Quantiles.
   * Gets the normalized rank error given k and pmf for the Quantiles DoubleSketch and ItemsSketch.
   * @param k the configuration parameter
   * @param pmf if true, returns the "double-sided" normalized rank error for the getPMF() function.
   * Otherwise, it is the "single-sided" normalized rank error for all the other queries.
   * @return if pmf is true, the normalized rank error for the getPMF() function.
   * Otherwise, it is the "single-sided" normalized rank error for all the other queries.
   */
  // constants were derived as the best fit to 99 percentile empirically measured max error in
  // thousands of trials
  public static double getNormalizedRankError(final int k, final boolean pmf) {
    if (pmf) {
      return 1.854 / pow(k, 0.9657);
    }
    return 1.576 / pow(k, 0.9726);
  }

  /**
   * Used by Classic Quantiles.
   * Gets the approximate <em>k</em> to use given epsilon, the normalized rank error
   * for the Quantiles DoubleSketch and ItemsSketch.
   * @param epsilon the normalized rank error between zero and one.
   * @param pmf if true, this function returns <em>k</em> assuming the input epsilon
   * is the desired "double-sided" epsilon for the getPMF() function. Otherwise, this function
   * returns <em>k</em> assuming the input epsilon is the desired "single-sided"
   * epsilon for all the other queries.
   * @return <i>k</i> given epsilon.
   */
  // constants were derived as the best fit to 99 percentile empirically measured max error in
  // thousands of trials
  public static int getKFromEpsilon(final double epsilon, final boolean pmf) {
    //Clamp epsilon to the smallest eps achievable with MAX_K and pmf = false.
    final double eps = max(epsilon, 6.395E-5);
    //Invert the error formula used in getNormalizedRankError().
    final double rawK = pmf
        ? exp(log(1.854 / eps) / 0.9657)
        : exp(log(1.576 / eps) / 0.9726);
    //Snap to the nearest integer when within 1 ppm of it, otherwise round up.
    final double nearest = round(rawK);
    final int k = (int) ((abs(nearest - rawK) < 1E-6) ? nearest : ceil(rawK));
    return max(MIN_K, min(MAX_K, k));
  }

  /**
   * Used by Classic Quantiles.
   * Checks the validity of the given k
   * @param k must be greater than 1 and less than 65536 and a power of 2.
   */
  static void checkK(final int k) {
    final boolean valid = (k >= MIN_K) && (k <= MAX_K) && isIntPowerOf2(k);
    if (!valid) {
      throw new SketchesArgumentException(
          "K must be >= " + MIN_K + " and <= " + MAX_K + " and a power of 2: " + k);
    }
  }

  /**
   * Used by Classic Quantiles.
   * Checks the validity of the given family ID
   * @param familyID the given family ID
   */
  static void checkFamilyID(final int familyID) {
    final Family family = Family.idToFamily(familyID);
    if (family != Family.QUANTILES) { //enum identity comparison
      throw new SketchesArgumentException(
          "Possible corruption: Invalid Family: " + family.toString());
    }
  }

  /**
   * Used by Classic Quantiles.
   * Checks the consistency of the flag bits and the state of preambleLong and the memory
   * capacity and returns the empty state.
   * @param preambleLongs the size of preamble in longs
   * @param flags the flags field
   * @param memCapBytes the memory capacity
   * @return the empty state
   */
  static boolean checkPreLongsFlagsCap(final int preambleLongs, final int flags, final long memCapBytes) {
    final boolean empty = (flags & EMPTY_FLAG_MASK) > 0; //empty state per the preamble flags
    final int minPre = Family.QUANTILES.getMinPreLongs(); //1
    final int maxPre = Family.QUANTILES.getMaxPreLongs(); //2
    //an empty sketch must carry the min preamble, a non-empty one the max preamble
    final boolean consistent = (empty && (preambleLongs == minPre))
        || (!empty && (preambleLongs == maxPre));
    if (!consistent) {
      throw new SketchesArgumentException(
          "Possible corruption: PreambleLongs inconsistent with empty state: " + preambleLongs);
    }
    checkHeapFlags(flags);
    if (memCapBytes < (preambleLongs << 3)) {
      throw new SketchesArgumentException(
          "Possible corruption: Insufficient capacity for preamble: " + memCapBytes);
    }
    return empty;
  }

  /**
   * Used by Classic Quantiles.
   * Checks just the flags field of the preamble. Allowed flags are Read Only, Empty, Compact, and
   * ordered.
   * @param flags the flags field
   */
  static void checkHeapFlags(final int flags) { //only used by checkPreLongsFlagsCap and test
    final int allowed =
        READ_ONLY_FLAG_MASK | EMPTY_FLAG_MASK | COMPACT_FLAG_MASK | ORDERED_FLAG_MASK;
    if ((flags & ~allowed) > 0) { //any bit outside the allowed set is corruption
      throw new SketchesArgumentException(
          "Possible corruption: Invalid flags field: " + Integer.toBinaryString(flags));
    }
  }

  /**
   * Used by Classic Quantiles.
   * Checks just the flags field of an input Memory object. Returns true for a compact
   * sketch, false for an update sketch. Does not perform additional checks, including sketch
   * family.
   * @param srcMem the source Memory containing a sketch
   * @return true if flags indicate a compact sketch, otherwise false
   */
  static boolean checkIsCompactMemory(final Memory srcMem) {
    // only reading so downcast is ok
    final int flags = extractFlags(srcMem);
    final int compactFlags = READ_ONLY_FLAG_MASK | COMPACT_FLAG_MASK;
    return (flags & compactFlags) > 0;
  }

  /**
   * Used by Classic Quantiles.
   * Returns the number of retained valid items in the sketch given k and n.
   * @param k the given configured k of the sketch
   * @param n the current number of items seen by the sketch
   * @return the number of retained items in the sketch given k and n.
   */
  static int computeRetainedItems(final int k, final long n) {
    final int baseBufItems = computeBaseBufferItems(k, n);
    final int validLevels = computeValidLevels(computeBitPattern(k, n));
    return baseBufItems + (validLevels * k); //each valid level holds exactly k items
  }

  /**
   * Used by Classic Quantiles.
   * Returns the total item capacity of an updatable, non-compact combined buffer
   * given <i>k</i> and <i>n</i>. If total levels = 0, this returns the ceiling power of 2
   * size for the base buffer or the MIN_BASE_BUF_SIZE, whichever is larger.
   *
   * @param k sketch parameter. This determines the accuracy of the sketch and the
   * size of the updatable data structure, which is a function of <i>k</i> and <i>n</i>.
   *
   * @param n The number of items in the input stream
   * @return the current item capacity of the combined buffer
   */
  static int computeCombinedBufferItemCapacity(final int k, final long n) {
    final int numLevels = computeNumLevelsNeeded(k, n);
    if (numLevels > 0) {
      return (2 + numLevels) * k; //base buffer (2k) plus k per level
    }
    //no levels yet: power-of-2 sized base buffer, at least 2 * MIN_K
    return Math.max(2 * MIN_K, ceilingIntPowerOf2(computeBaseBufferItems(k, n)));
  }

  /**
   * Used by Classic Quantiles.
   * Computes the number of valid levels above the base buffer
   * @param bitPattern the bit pattern
   * @return the number of valid levels above the base buffer
   */
  static int computeValidLevels(final long bitPattern) {
    return Long.bitCount(bitPattern); //one-bit per valid level
  }

  /**
   * Used by Classic Quantiles.
   * Computes the total number of logarithmic levels above the base buffer given the bitPattern.
   * @param bitPattern the given bit pattern
   * @return the total number of logarithmic levels above the base buffer
   */
  static int computeTotalLevels(final long bitPattern) {
    return hiBitPos(bitPattern) + 1;
  }

  /**
   * Used by Classic Quantiles.
   * Computes the total number of logarithmic levels above the base buffer given k and n.
   * This is equivalent to max(floor(lg(n/k), 0).
   * Returns zero if n is less than 2 * k.
   * @param k the configured size of the sketch
   * @param n the total number presented to the sketch.
   * @return the total number of levels needed.
   */
  static int computeNumLevelsNeeded(final int k, final long n) {
    return 1 + hiBitPos(n / (2L * k)); //hiBitPos(0) is -1, giving 0 levels
  }

  /**
   * Used by Classic Quantiles.
   * Computes the number of base buffer items given k, n
   * @param k the configured size of the sketch
   * @param n the total number presented to the sketch
   * @return the number of base buffer items
   */
  static int computeBaseBufferItems(final int k, final long n) {
    return (int) (n % (2L * k));
  }

  /**
   * Used by Classic Quantiles.
   * Computes the levels bit pattern given k, n.
   * This is computed as <i>n / (2*k)</i>.
   * @param k the configured size of the sketch
   * @param n the total number presented to the sketch.
   * @return the levels bit pattern
   */
  static long computeBitPattern(final int k, final long n) {
    return n / (2L * k);
  }

  /**
   * Used by Classic Quantiles.
   * Zero-based position of the highest one-bit of the given long.
   * Returns minus one if num is zero.
   * @param num the given long
   * @return Zero-based position of the highest one-bit of the given long
   */
  static int hiBitPos(final long num) {
    return 63 - Long.numberOfLeadingZeros(num);
  }

  /**
   * Used by Classic Quantiles.
   * Returns the zero-based bit position of the lowest zero bit of <i>bits</i> starting at
   * <i>startingBit</i>. If input is all ones, this returns 64.
   * @param bits the input bits as a long
   * @param startingBit the zero-based starting bit position. Only the low 6 bits are used.
   * @return the zero-based bit position of the lowest zero bit starting at <i>startingBit</i>.
   */
  static int lowestZeroBitStartingAt(final long bits, final int startingBit) {
    final int start = startingBit & 0X3F;
    //count the run of one-bits above start; the unsigned shift fills zeros at the top,
    //so the inverse is zero only when bits is all ones and start is zero.
    final long inverted = ~(bits >>> start);
    return (inverted == 0L) ? 64 : start + Long.numberOfTrailingZeros(inverted);
  }
}
| 2,820 |
0 | Create_ds/datasketches-java/src/main/java/org/apache/datasketches | Create_ds/datasketches-java/src/main/java/org/apache/datasketches/quantiles/ItemsByteArrayImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.quantiles;
import static org.apache.datasketches.quantiles.PreambleUtil.COMPACT_FLAG_MASK;
import static org.apache.datasketches.quantiles.PreambleUtil.EMPTY_FLAG_MASK;
import static org.apache.datasketches.quantiles.PreambleUtil.ORDERED_FLAG_MASK;
import static org.apache.datasketches.quantiles.PreambleUtil.insertFamilyID;
import static org.apache.datasketches.quantiles.PreambleUtil.insertFlags;
import static org.apache.datasketches.quantiles.PreambleUtil.insertK;
import static org.apache.datasketches.quantiles.PreambleUtil.insertN;
import static org.apache.datasketches.quantiles.PreambleUtil.insertPreLongs;
import static org.apache.datasketches.quantiles.PreambleUtil.insertSerVer;
import java.lang.reflect.Array;
import java.util.Arrays;
import org.apache.datasketches.common.ArrayOfItemsSerDe;
import org.apache.datasketches.common.Family;
import org.apache.datasketches.memory.WritableMemory;
/**
* The items to byte array algorithms.
*
* @author Lee Rhodes
* @author Alexander Saydakov
*/
final class ItemsByteArrayImpl {

  private ItemsByteArrayImpl() {} //static utility class, never instantiated

  /**
   * Serializes the given sketch into a byte array in compact form.
   * @param <T> the item type
   * @param sketch the source sketch; not modified
   * @param ordered if true, the base buffer region of the output is sorted
   * @param serDe the item serializer/deserializer
   * @return the serialized sketch image
   */
  static <T> byte[] toByteArray(final ItemsSketch<T> sketch, final boolean ordered,
      final ArrayOfItemsSerDe<T> serDe) {
    final boolean empty = sketch.isEmpty();

    //the serialized image is always compact
    final int flags = COMPACT_FLAG_MASK
        | (empty ? EMPTY_FLAG_MASK : 0)
        | (ordered ? ORDERED_FLAG_MASK : 0);

    if (empty) { //a single preamble long and no data
      final byte[] outBytes = new byte[Long.BYTES];
      insertPre0(WritableMemory.writableWrap(outBytes), 1, flags, sketch.getK());
      return outBytes;
    }

    //not empty: two preamble longs followed by the serialized items
    final T[] dataArr = combinedBufferToItemsArray(sketch, ordered); //includes min and max
    final byte[] itemBytes = serDe.serializeToByteArray(dataArr);
    final int preLongs = 2;
    final int preBytes = preLongs << 3;
    final byte[] outBytes = new byte[preBytes + itemBytes.length];
    final WritableMemory wmem = WritableMemory.writableWrap(outBytes);
    insertPre0(wmem, preLongs, flags, sketch.getK());
    insertN(wmem, sketch.getN());
    wmem.putByteArray(preBytes, itemBytes, 0, itemBytes.length);
    return outBytes;
  }

  /**
   * Returns an array of items in compact form: min item, max item, the base buffer, then each
   * valid level, all extracted from the Combined Buffer.
   * @param <T> the data type
   * @param sketch a type of ItemsSketch
   * @param ordered true if the desired form of the resulting array has the base buffer sorted.
   * @return an array of items, including min and max extracted from the Combined Buffer.
   */
  @SuppressWarnings("unchecked")
  private static <T> T[] combinedBufferToItemsArray(final ItemsSketch<T> sketch,
      final boolean ordered) {
    final int extra = 2; //slots 0 and 1 hold the min and max items
    final T minItem = sketch.getMinItem();
    final T[] outArr = (T[]) Array.newInstance(minItem.getClass(), sketch.getNumRetained() + extra);
    outArr[0] = minItem;
    outArr[1] = sketch.getMaxItem();

    //copy the base buffer
    final Object[] combBuf = sketch.getCombinedBuffer();
    final int bbCnt = sketch.getBaseBufferCount();
    System.arraycopy(combBuf, 0, outArr, extra, bbCnt);

    //copy each valid level; a one-bit in the bit pattern marks level occupancy
    final int k = sketch.getK();
    int outIndex = extra + bbCnt;
    int level = 0;
    for (long bits = sketch.getBitPattern(); bits != 0L; bits >>>= 1, level++) {
      if ((bits & 1L) != 0L) {
        System.arraycopy(combBuf, (2 + level) * k, outArr, outIndex, k);
        outIndex += k;
      }
    }

    if (ordered) { //levels are already sorted; only the base buffer region needs it
      Arrays.sort(outArr, extra, extra + bbCnt, sketch.getComparator());
    }
    return outArr;
  }

  /**
   * Writes the first preamble long into the given memory.
   * @param wmem the destination memory
   * @param preLongs the number of preamble longs
   * @param flags the flags byte
   * @param k the sketch configuration parameter
   */
  private static void insertPre0(final WritableMemory wmem,
      final int preLongs, final int flags, final int k) {
    insertPreLongs(wmem, preLongs);
    insertSerVer(wmem, ItemsUtil.ITEMS_SER_VER);
    insertFamilyID(wmem, Family.QUANTILES.getID());
    insertFlags(wmem, flags);
    insertK(wmem, k);
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.quantiles;
import java.util.Objects;
import org.apache.datasketches.common.SketchesStateException;
import org.apache.datasketches.quantilescommon.QuantilesDoublesSketchIterator;
/**
 * Iterator over DoublesSketch. The order is not defined.
 *
 * <p>Iteration begins with the sketch's base-buffer items (weight 1), then visits each
 * level whose bit is set in the given bit pattern. The per-item weight doubles with every
 * level boundary crossed, including skipped (invalid) levels, so items at level L carry
 * weight 2^(L+1).</p>
 */
public final class DoublesSketchIterator implements QuantilesDoublesSketchIterator {
  private DoublesSketchAccessor sketchAccessor; //view positioned at the current level's items
  private long bitPattern; //remaining valid-level bits; shifted right as levels are passed
  private int level;       //current level index; -1 denotes the base buffer
  private long weight;     //weight of each item at the current level
  private int index;       //position within the current level; -1 before the first next()

  DoublesSketchIterator(final DoublesSketch sketch, final long bitPattern) {
    Objects.requireNonNull(sketch, "sketch must not be null");
    sketchAccessor = DoublesSketchAccessor.wrap(sketch);
    this.bitPattern = bitPattern;
    this.level = -1;
    this.weight = 1;
    this.index = -1;
  }

  @Override
  public double getQuantile() {
    //next() must have been called at least once and have returned true
    if (index < 0) { throw new SketchesStateException("index < 0; getQuantile() was called before next()"); }
    return sketchAccessor.get(index);
  }

  @Override
  public long getWeight() {
    return weight;
  }

  @Override
  public boolean next() {
    index++; // advance index within the current level
    if (index < sketchAccessor.numItems()) {
      return true;
    }
    // current level exhausted: go to the next non-empty level
    do {
      level++;
      if (level > 0) {
        bitPattern >>>= 1; //drop the bit of the level just passed (no shift for level 0)
      }
      if (bitPattern == 0L) {
        return false; // run out of levels
      }
      weight *= 2; //doubles for every level crossed, valid or not
    } while ((bitPattern & 1L) == 0L);
    index = 0;
    sketchAccessor.setLevel(level);
    return true;
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.quantiles;
import static org.apache.datasketches.quantiles.ClassicUtil.DOUBLES_SER_VER;
import static org.apache.datasketches.quantiles.ClassicUtil.checkFamilyID;
import static org.apache.datasketches.quantiles.ClassicUtil.checkK;
import static org.apache.datasketches.quantiles.ClassicUtil.computeBitPattern;
import static org.apache.datasketches.quantiles.PreambleUtil.COMBINED_BUFFER;
import static org.apache.datasketches.quantiles.PreambleUtil.EMPTY_FLAG_MASK;
import static org.apache.datasketches.quantiles.PreambleUtil.FLAGS_BYTE;
import static org.apache.datasketches.quantiles.PreambleUtil.MAX_DOUBLE;
import static org.apache.datasketches.quantiles.PreambleUtil.MIN_DOUBLE;
import static org.apache.datasketches.quantiles.PreambleUtil.N_LONG;
import static org.apache.datasketches.quantiles.PreambleUtil.extractFamilyID;
import static org.apache.datasketches.quantiles.PreambleUtil.extractFlags;
import static org.apache.datasketches.quantiles.PreambleUtil.extractK;
import static org.apache.datasketches.quantiles.PreambleUtil.extractN;
import static org.apache.datasketches.quantiles.PreambleUtil.extractPreLongs;
import static org.apache.datasketches.quantiles.PreambleUtil.extractSerVer;
import static org.apache.datasketches.quantiles.PreambleUtil.insertFamilyID;
import static org.apache.datasketches.quantiles.PreambleUtil.insertFlags;
import static org.apache.datasketches.quantiles.PreambleUtil.insertK;
import static org.apache.datasketches.quantiles.PreambleUtil.insertMaxDouble;
import static org.apache.datasketches.quantiles.PreambleUtil.insertMinDouble;
import static org.apache.datasketches.quantiles.PreambleUtil.insertN;
import static org.apache.datasketches.quantiles.PreambleUtil.insertPreLongs;
import static org.apache.datasketches.quantiles.PreambleUtil.insertSerVer;
import org.apache.datasketches.common.Family;
import org.apache.datasketches.common.SketchesArgumentException;
import org.apache.datasketches.memory.MemoryRequestServer;
import org.apache.datasketches.memory.WritableMemory;
/**
 * Implements the DoublesSketch off-heap.
 *
 * <p>All state lives in the backing WritableMemory {@code mem_}: the preamble, the min and
 * max quantiles, and the combined buffer. The base buffer count and the bit pattern are
 * always derived from <i>n</i> and never stored in the image.</p>
 *
 * @author Kevin Lang
 * @author Lee Rhodes
 *
 */
final class DirectUpdateDoublesSketch extends DirectUpdateDoublesSketchR {
  //Callback for requesting larger backing memory; lazily resolved from mem_ when first needed.
  MemoryRequestServer memReqSvr = null;

  private DirectUpdateDoublesSketch(final int k) {
    super(k); //Checks k
  }

  /**
   * Obtains a new Direct instance of a DoublesSketch, which may be off-heap.
   *
   * @param k Parameter that controls space usage of sketch and accuracy of estimates.
   * Must be greater than 1 and less than 65536 and a power of 2.
   * @param dstMem the destination Memory that will be initialized to hold the data for this sketch.
   * It must initially be at least (16 * MIN_K + 32) bytes, where MIN_K defaults to 2. As it grows
   * it will request more memory using the MemoryRequest callback.
   * @return a DirectUpdateDoublesSketch
   */
  static DirectUpdateDoublesSketch newInstance(final int k, final WritableMemory dstMem) {
    // must be able to hold at least an empty sketch
    final long memCap = dstMem.getCapacity();
    checkDirectMemCapacity(k, 0, memCap);

    //initialize dstMem
    dstMem.putLong(0, 0L); //clear pre0
    insertPreLongs(dstMem, 2);
    insertSerVer(dstMem, DOUBLES_SER_VER);
    insertFamilyID(dstMem, Family.QUANTILES.getID());
    insertFlags(dstMem, EMPTY_FLAG_MASK);
    insertK(dstMem, k);

    //write n, min, and max only if the image has room beyond the preamble
    if (memCap >= COMBINED_BUFFER) {
      insertN(dstMem, 0L);
      insertMinDouble(dstMem, Double.NaN);
      insertMaxDouble(dstMem, Double.NaN);
    }

    final DirectUpdateDoublesSketch dds = new DirectUpdateDoublesSketch(k);
    dds.mem_ = dstMem;
    return dds;
  }

  /**
   * Wrap this sketch around the given non-compact Memory image of a DoublesSketch.
   *
   * @param srcMem the given non-compact Memory image of a DoublesSketch that may have data
   * @return a sketch that wraps the given srcMem
   */
  static DirectUpdateDoublesSketch wrapInstance(final WritableMemory srcMem) {
    final long memCap = srcMem.getCapacity();

    //extract preamble fields
    final int preLongs = extractPreLongs(srcMem);
    final int serVer = extractSerVer(srcMem);
    final int familyID = extractFamilyID(srcMem);
    final int flags = extractFlags(srcMem);
    final int k = extractK(srcMem);

    final boolean empty = (flags & EMPTY_FLAG_MASK) > 0; //Preamble flags empty state
    final long n = empty ? 0 : extractN(srcMem);

    //VALIDITY CHECKS
    checkPreLongs(preLongs);
    checkFamilyID(familyID);
    DoublesUtil.checkDoublesSerVer(serVer, MIN_DIRECT_DOUBLES_SER_VER);
    checkDirectFlags(flags); //Cannot be compact
    checkK(k);
    checkCompact(serVer, flags);
    checkDirectMemCapacity(k, n, memCap);
    checkEmptyAndN(empty, n);

    final DirectUpdateDoublesSketch dds = new DirectUpdateDoublesSketch(k);
    dds.mem_ = srcMem;
    return dds;
  }

  @Override
  public boolean isReadOnly() {
    return false;
  }

  @Override
  public void update(final double dataItem) {
    if (Double.isNaN(dataItem)) { return; } //NaN items are ignored
    final int curBBCount = getBaseBufferCount();
    final int newBBCount = curBBCount + 1; //derived, not stored

    //must check memory capacity before we put anything in it
    final int combBufItemCap = getCombinedBufferItemCapacity();
    if (newBBCount > combBufItemCap) {
      //only changes combinedBuffer when it is only a base buffer
      mem_ = growCombinedMemBuffer(2 * getK());
    }

    final long curN = getN();
    final long newN = curN + 1;

    if (curN == 0) { //set min and max quantiles
      putMaxItem(dataItem);
      putMinItem(dataItem);
    } else {
      if (dataItem > getMaxItem()) { putMaxItem(dataItem); }
      if (dataItem < getMinItem()) { putMinItem(dataItem); }
    }

    //append the item at the end of the base buffer region
    mem_.putDouble(COMBINED_BUFFER + ((long) curBBCount * Double.BYTES), dataItem); //put the item
    mem_.putByte(FLAGS_BYTE, (byte) 0); //not compact, not ordered, not empty

    if (newBBCount == (2 * k_)) { //Propagate
      // make sure there will be enough levels for the propagation
      final int curMemItemCap = getCombinedBufferItemCapacity();
      final int itemSpaceNeeded = DoublesUpdateImpl.getRequiredItemCapacity(k_, newN);

      //check mem has capacity to accommodate new level
      if (itemSpaceNeeded > curMemItemCap) {
        // copies base buffer plus old levels, adds space for new level
        mem_ = growCombinedMemBuffer(itemSpaceNeeded);
      }

      // sort base buffer via accessor which modifies the underlying base buffer,
      // then use as one of the inputs to propagate-carry
      final DoublesSketchAccessor bbAccessor = DoublesSketchAccessor.wrap(this, true);
      bbAccessor.sort();

      final long newBitPattern = DoublesUpdateImpl.inPlacePropagateCarry(
          0, // starting level
          null,
          bbAccessor,
          true,
          k_,
          DoublesSketchAccessor.wrap(this, true),
          getBitPattern()
      );

      assert newBitPattern == computeBitPattern(k_, newN); // internal consistency check
      //bit pattern on direct is always derived, no need to save it.
    }
    putN(newN);
    classicQdsSV = null; //invalidate any cached sorted view (field inherited from a parent class)
  }

  @Override
  public void reset() {
    //below COMBINED_BUFFER the image can only be an empty preamble, so there is nothing to clear
    if (mem_.getCapacity() >= COMBINED_BUFFER) {
      mem_.putByte(FLAGS_BYTE, (byte) EMPTY_FLAG_MASK); //not compact, not ordered
      mem_.putLong(N_LONG, 0L);
      mem_.putDouble(MIN_DOUBLE, Double.NaN);
      mem_.putDouble(MAX_DOUBLE, Double.NaN);
    }
  }

  //Restricted overrides
  //Puts: each writes directly into the backing Memory image

  @Override
  void putMinItem(final double minQuantile) {
    assert (mem_.getCapacity() >= COMBINED_BUFFER);
    mem_.putDouble(MIN_DOUBLE, minQuantile);
  }

  @Override
  void putMaxItem(final double maxQuantile) {
    assert (mem_.getCapacity() >= COMBINED_BUFFER);
    mem_.putDouble(MAX_DOUBLE, maxQuantile);
  }

  @Override
  void putN(final long n) {
    assert (mem_.getCapacity() >= COMBINED_BUFFER);
    mem_.putLong(N_LONG, n);
  }

  @Override
  void putCombinedBuffer(final double[] combinedBuffer) {
    mem_.putDoubleArray(COMBINED_BUFFER, combinedBuffer, 0, combinedBuffer.length);
  }

  @Override
  void putBaseBufferCount(final int baseBufferCount) {
    //intentionally a no-op, not kept on-heap, always derived.
  }

  @Override
  void putBitPattern(final long bitPattern) {
    //intentionally a no-op, not kept on-heap, always derived.
  }

  @Override
  double[] growCombinedBuffer(final int curCombBufItemCap, final int itemSpaceNeeded) {
    mem_ = growCombinedMemBuffer(itemSpaceNeeded);
    // copy out any data that was there
    final double[] newCombBuf = new double[itemSpaceNeeded];
    mem_.getDoubleArray(COMBINED_BUFFER, newCombBuf, 0, curCombBufItemCap);
    return newCombBuf;
  }

  //Direct supporting methods

  /**
   * Requests a larger backing Memory via the MemoryRequestServer, copies the current image
   * into it, and asks the server to close the old one.
   * @param itemSpaceNeeded the required combined buffer capacity in items
   * @return the new, larger WritableMemory
   * @throws SketchesArgumentException if no MemoryRequestServer is available
   */
  private WritableMemory growCombinedMemBuffer(final int itemSpaceNeeded) {
    final long memBytes = mem_.getCapacity();
    final int needBytes = (itemSpaceNeeded << 3) + COMBINED_BUFFER; //+ preamble + min & max
    assert needBytes > memBytes;

    memReqSvr = (memReqSvr == null) ? mem_.getMemoryRequestServer() : memReqSvr;

    if (memReqSvr == null) {
      throw new SketchesArgumentException(
          "A request for more memory has been denied, "
              + "or a default MemoryRequestServer has not been provided. Must abort. ");
    }

    final WritableMemory newMem = memReqSvr.request(mem_, needBytes);
    mem_.copyTo(0, newMem, 0, memBytes);
    memReqSvr.requestClose(mem_, newMem);
    return newMem;
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.quantiles;
import org.apache.datasketches.memory.WritableMemory;
/**
 * For building a new DoublesSketch Union operation.
 *
 * @author Lee Rhodes
 */
public class DoublesUnionBuilder {
  //the configured maximum k; defaults to 128
  private int maxK_ = PreambleUtil.DEFAULT_K;

  /**
   * Constructor for a new DoublesUnionBuilder. The default configuration is
   * <ul>
   * <li>k: 128. This produces a normalized rank error of about 1.7%</li>
   * <li>Memory: null</li>
   * </ul>
   */
  public DoublesUnionBuilder() {}

  /**
   * Sets the parameter <i>maxK</i> that determines the maximum size of the sketch that
   * results from a union and its accuracy.
   * @param maxK determines the accuracy and size of the union and is a maximum.
   * The effective <i>k</i> can be smaller due to unions with smaller <i>k</i> sketches.
   * It is recommended that <i>maxK</i> be a power of 2 to enable unioning of sketches with
   * different <i>k</i>.
   * @return this builder
   */
  public DoublesUnionBuilder setMaxK(final int maxK) {
    ClassicUtil.checkK(maxK); //rejects invalid values of k
    this.maxK_ = maxK;
    return this;
  }

  /**
   * Gets the current configured <i>maxK</i>
   * @return the current configured <i>maxK</i>
   */
  public int getMaxK() {
    return maxK_;
  }

  /**
   * Returns a new empty Union object with the current configuration of this Builder.
   * @return a Union object
   */
  public DoublesUnion build() {
    return DoublesUnionImpl.heapInstance(maxK_);
  }

  /**
   * Returns a new empty Union object with the current configuration of this Builder
   * and the specified backing destination Memory store.
   * @param dstMem the destination memory
   * @return a Union object
   */
  public DoublesUnion build(final WritableMemory dstMem) {
    return DoublesUnionImpl.directInstance(maxK_, dstMem);
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.quantiles;
import static org.apache.datasketches.common.Family.idToFamily;
import static org.apache.datasketches.quantiles.ClassicUtil.LS;
import static org.apache.datasketches.quantiles.ClassicUtil.computeRetainedItems;
import java.nio.ByteOrder;
import org.apache.datasketches.memory.Memory;
import org.apache.datasketches.memory.WritableMemory;
//@formatter:off
/**
* This class defines the serialized data structure and provides access methods for the key fields.
*
* <p>The intent of the design of this class was to isolate the detailed knowledge of the bit and
* byte layout of the serialized form of the sketches derived from the base sketch classes into one place.
* This allows the possibility of the introduction of different serialization
* schemes with minimal impact on the rest of the library.</p>
*
* <p>
* LAYOUT: The low significance bytes of this <i>long</i> based data structure are on the right.
* The multi-byte primitives are stored in native byte order.
* The single byte fields are treated as unsigned.</p>
*
* <p>An empty ItemsSketch, on-heap DoublesSketch or compact off-heap DoublesSketch only require 8
* bytes. An off-heap UpdateDoublesSketch and all non-empty sketches require at least 16 bytes of
* preamble.</p>
*
* <pre>{@code
* Long || Start Byte Adr: Common for both DoublesSketch and ItemsSketch
* Adr:
* || 7 | 6 | 5 | 4 | 3 | 2 | 1 | 0 |
* 0 ||------unused-----|--------K--------| Flags | FamID | SerVer | Preamble_Longs |
*
* || 15 | 14 | 13 | 12 | 11 | 10 | 9 | 8 |
* 1 ||-----------------------------------N_LONG--------------------------------------|
*
* Applies only to DoublesSketch:
*
* || 23 | 22 | 21 | 20 | 19 | 18 | 17 | 16 |
* 2 ||---------------------------START OF DATA, MIN_DOUBLE---------------------------|
*
* || 31 | 30 | 29 | 28 | 27 | 26 | 25 | 24 |
* 3 ||----------------------------------MAX_DOUBLE-----------------------------------|
*
* || 39 | 38 | 37 | 36 | 35 | 34 | 33 | 32 |
* 4 ||---------------------------START OF COMBINED BUfFER----------------------------|
* }</pre>
*
* @author Lee Rhodes
*/
final class PreambleUtil {

  private PreambleUtil() {} //static access only; no instances

  // ###### DO NOT MESS WITH THIS FROM HERE ...
  // Preamble byte Addresses
  static final int PREAMBLE_LONGS_BYTE = 0;
  static final int SER_VER_BYTE = 1;
  static final int FAMILY_BYTE = 2;
  static final int FLAGS_BYTE = 3;
  static final int K_SHORT = 4; //to 5
  static final int N_LONG = 8; //to 15

  //After Preamble:
  static final int MIN_DOUBLE = 16; //to 23 (Only for DoublesSketch)
  static final int MAX_DOUBLE = 24; //to 31 (Only for DoublesSketch)
  static final int COMBINED_BUFFER = 32; //to 39 (Only for DoublesSketch)

  // flag bit masks (single-bit values within the FLAGS_BYTE)
  static final int BIG_ENDIAN_FLAG_MASK = 1;
  static final int READ_ONLY_FLAG_MASK = 2;
  static final int EMPTY_FLAG_MASK = 4;
  static final int COMPACT_FLAG_MASK = 8;
  static final int ORDERED_FLAG_MASK = 16;

  static final boolean NATIVE_ORDER_IS_BIG_ENDIAN =
      (ByteOrder.nativeOrder() == ByteOrder.BIG_ENDIAN);

  /**
   * Default K for about 1.7% normalized rank accuracy
   */
  static final int DEFAULT_K = 128;

  // ###### TO HERE.

  // STRINGS

  /**
   * Returns a human readable string summary of the internal state of the given byte array.
   * Used primarily in testing.
   *
   * @param byteArr the given byte array.
   * @param isDoublesSketch flag to indicate that the byte array represents DoublesSketch
   * to output min and max quantiles in the summary
   * @return the summary string.
   */
  static String toString(final byte[] byteArr, final boolean isDoublesSketch) {
    final Memory mem = Memory.wrap(byteArr);
    return toString(mem, isDoublesSketch);
  }

  /**
   * Returns a human readable string summary of the Preamble of the given Memory. If this Memory
   * image is from a DoublesSketch, the MinQuantile and MaxQuantile will also be output.
   * Used primarily in testing.
   *
   * @param mem the given Memory
   * @param isDoublesSketch flag to indicate that the byte array represents DoublesSketch
   * to output min and max quantiles in the summary
   * @return the summary string.
   */
  static String toString(final Memory mem, final boolean isDoublesSketch) {
    return memoryToString(mem, isDoublesSketch);
  }

  /** Builds the human-readable preamble summary; all fields are read, none are written. */
  private static String memoryToString(final Memory srcMem, final boolean isDoublesSketch) {
    final int preLongs = extractPreLongs(srcMem); //either 1 or 2
    final int serVer = extractSerVer(srcMem);
    final int familyID = extractFamilyID(srcMem);
    final String famName = idToFamily(familyID).toString();
    final int flags = extractFlags(srcMem);
    //decode individual flag bits
    final boolean bigEndian = (flags & BIG_ENDIAN_FLAG_MASK) > 0;
    final String nativeOrder = ByteOrder.nativeOrder().toString();
    final boolean readOnly = (flags & READ_ONLY_FLAG_MASK) > 0;
    final boolean empty = (flags & EMPTY_FLAG_MASK) > 0;
    final boolean compact = (flags & COMPACT_FLAG_MASK) > 0;
    final boolean ordered = (flags & ORDERED_FLAG_MASK) > 0;
    final int k = extractK(srcMem);

    final long n = (preLongs == 1) ? 0L : extractN(srcMem); //a 1-long preamble implies empty
    double minDouble = Double.NaN;
    double maxDouble = Double.NaN;
    if ((preLongs > 1) && isDoublesSketch) { // preLongs = 2 or 3
      minDouble = extractMinDouble(srcMem);
      maxDouble = extractMaxDouble(srcMem);
    }

    final StringBuilder sb = new StringBuilder();
    sb.append(LS);
    sb.append("### QUANTILES SKETCH PREAMBLE SUMMARY:").append(LS);
    sb.append("Byte 0: Preamble Longs : ").append(preLongs).append(LS);
    sb.append("Byte 1: Serialization Version: ").append(serVer).append(LS);
    sb.append("Byte 2: Family : ").append(famName).append(LS);
    sb.append("Byte 3: Flags Field : ").append(String.format("%02o", flags)).append(LS);
    sb.append(" BIG ENDIAN : ").append(bigEndian).append(LS);
    sb.append(" (Native Byte Order) : ").append(nativeOrder).append(LS);
    sb.append(" READ ONLY : ").append(readOnly).append(LS);
    sb.append(" EMPTY : ").append(empty).append(LS);
    sb.append(" COMPACT : ").append(compact).append(LS);
    sb.append(" ORDERED : ").append(ordered).append(LS);
    sb.append("Bytes 4-5 : K : ").append(k).append(LS);
    if (preLongs == 1) {
      sb.append(" --ABSENT, ASSUMED:").append(LS);
    }
    sb.append("Bytes 8-15 : N : ").append(n).append(LS);
    if (isDoublesSketch) {
      sb.append("MinDouble : ").append(minDouble).append(LS);
      sb.append("MaxDouble : ").append(maxDouble).append(LS);
    }
    sb.append("Retained Items : ").append(computeRetainedItems(k, n)).append(LS);
    sb.append("Total Bytes : ").append(srcMem.getCapacity()).append(LS);
    sb.append("### END SKETCH PREAMBLE SUMMARY").append(LS);
    return sb.toString();
  }
  //@formatter:on

  // Extract methods: read preamble fields from a Memory image.
  // Single-byte and short fields are masked so they are treated as unsigned.

  static int extractPreLongs(final Memory mem) {
    return mem.getByte(PREAMBLE_LONGS_BYTE) & 0XFF;
  }

  static int extractSerVer(final Memory mem) {
    return mem.getByte(SER_VER_BYTE) & 0XFF;
  }

  static int extractFamilyID(final Memory mem) {
    return mem.getByte(FAMILY_BYTE) & 0XFF;
  }

  static int extractFlags(final Memory mem) {
    return mem.getByte(FLAGS_BYTE) & 0XFF;
  }

  static int extractK(final Memory mem) {
    return mem.getShort(K_SHORT) & 0XFFFF;
  }

  static long extractN(final Memory mem) {
    return mem.getLong(N_LONG);
  }

  static double extractMinDouble(final Memory mem) {
    return mem.getDouble(MIN_DOUBLE);
  }

  static double extractMaxDouble(final Memory mem) {
    return mem.getDouble(MAX_DOUBLE);
  }

  // Insert methods: write preamble fields into a WritableMemory image.

  static void insertPreLongs(final WritableMemory wmem, final int numPreLongs) {
    wmem.putByte(PREAMBLE_LONGS_BYTE, (byte) numPreLongs);
  }

  static void insertSerVer(final WritableMemory wmem, final int serVer) {
    wmem.putByte(SER_VER_BYTE, (byte) serVer);
  }

  static void insertFamilyID(final WritableMemory wmem, final int famId) {
    wmem.putByte(FAMILY_BYTE, (byte) famId);
  }

  static void insertFlags(final WritableMemory wmem, final int flags) {
    wmem.putByte(FLAGS_BYTE, (byte) flags);
  }

  static void insertK(final WritableMemory wmem, final int k) {
    wmem.putShort(K_SHORT, (short) k);
  }

  static void insertN(final WritableMemory wmem, final long n) {
    wmem.putLong(N_LONG, n);
  }

  static void insertMinDouble(final WritableMemory wmem, final double minDouble) {
    wmem.putDouble(MIN_DOUBLE, minDouble);
  }

  static void insertMaxDouble(final WritableMemory wmem, final double maxDouble) {
    wmem.putDouble(MAX_DOUBLE, maxDouble);
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* <p>The quantiles package contains stochastic streaming algorithms that enable single-pass
 * analysis of the distribution of a stream of real values.
* </p>
*
* @see org.apache.datasketches.quantiles.DoublesSketch
* @see org.apache.datasketches.quantiles.ItemsSketch
*/
package org.apache.datasketches.quantiles;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.quantiles;
import java.util.Arrays;
/**
 * Accessor for the combined buffer of an on-heap DoublesSketch.
 *
 * @author Jon Malkin
 */
class HeapDoublesSketchAccessor extends DoublesSketchAccessor {

  HeapDoublesSketchAccessor(final DoublesSketch ds,
      final boolean forceSize,
      final int level) {
    super(ds, forceSize, level);
    assert !ds.hasMemory(); //this accessor is for heap-backed sketches only
  }

  @Override
  DoublesSketchAccessor copyAndSetLevel(final int level) {
    return new HeapDoublesSketchAccessor(ds_, forceSize_, level);
  }

  @Override
  double get(final int index) {
    assert index >= 0 && index < numItems_;
    assert n_ == ds_.getN();

    final double[] buf = ds_.getCombinedBuffer();
    return buf[offset_ + index];
  }

  @Override
  double set(final int index, final double quantile) {
    assert index >= 0 && index < numItems_;
    assert n_ == ds_.getN();

    final double[] buf = ds_.getCombinedBuffer();
    final int pos = offset_ + index;
    final double previous = buf[pos];
    buf[pos] = quantile;
    return previous;
  }

  @Override
  double[] getArray(final int fromIdx, final int numItems) {
    final int from = offset_ + fromIdx;
    final int to = from + numItems;
    return Arrays.copyOfRange(ds_.getCombinedBuffer(), from, to);
  }

  @Override
  void putArray(final double[] srcArray, final int srcIndex,
      final int dstIndex, final int numItems) {
    System.arraycopy(srcArray, srcIndex, ds_.getCombinedBuffer(), offset_ + dstIndex, numItems);
  }

  @Override
  void sort() {
    assert currLvl_ == BB_LVL_IDX;
    if (ds_.isCompact()) {
      return; //a compact sketch is already sorted; not an error but a no-op
    }
    Arrays.sort(ds_.getCombinedBuffer(), offset_, offset_ + numItems_);
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.quantiles;
import static org.apache.datasketches.common.Util.LS;
import java.util.Arrays;
import org.apache.datasketches.common.SketchesArgumentException;
/**
* Utility class for generic quantiles sketch.
*
* @author Kevin Lang
* @author Alexander Saydakov
*/
final class ItemsUtil {
private ItemsUtil() {}
static final int ITEMS_SER_VER = 3;
static final int PRIOR_ITEMS_SER_VER = 2;
/**
 * Check the validity of the given serialization version.
 * @param serVer the serialization version extracted from a preamble
 * @throws SketchesArgumentException if the version is neither the current nor the prior one
 */
static void checkItemsSerVer(final int serVer) {
  if ((serVer != ITEMS_SER_VER) && (serVer != PRIOR_ITEMS_SER_VER)) {
    throw new SketchesArgumentException(
        "Possible corruption: Invalid Serialization Version: " + serVer);
  }
}
/**
 * Called when the base buffer has just acquired 2*k elements: sorts the base buffer,
 * propagates it into the levels structure, and then empties the base buffer.
 * @param <T> the data type
 * @param sketch the given quantiles sketch
 */
@SuppressWarnings("unchecked")
static <T> void processFullBaseBuffer(final ItemsSketch<T> sketch) {
  final int bbCount = sketch.getBaseBufferCount();
  final long n = sketch.getN();
  assert bbCount == (2 * sketch.getK()); // internal consistency check

  // make sure there will be enough levels for the propagation
  ItemsUpdateImpl.maybeGrowLevels(sketch, n); // important: n_ was incremented by update before we got here

  // this aliasing is a bit dangerous; notice that we did it after the possible resizing,
  // so baseBuffer refers to the (possibly new) combined buffer
  final Object[] baseBuffer = sketch.getCombinedBuffer();

  Arrays.sort((T[]) baseBuffer, 0, bbCount, sketch.getComparator());
  ItemsUpdateImpl.inPlacePropagateCarry(
      0,
      null, 0, // this null is okay
      (T[]) baseBuffer, 0,
      true, sketch);
  sketch.baseBufferCount_ = 0;
  Arrays.fill(baseBuffer, 0, 2 * sketch.getK(), null); // to release the discarded objects
  assert (n / (2L * sketch.getK())) == sketch.getBitPattern(); // internal consistency check
}
/**
 * Renders a human-readable view of the given ItemsSketch.
 *
 * @param <T> the data type of the sketch
 * @param sketchSummary if true, append the summary section
 * @param dataDetail if true, append the level-by-level raw data section
 * @param sketch the sketch to render
 * @return the formatted string
 */
static <T> String toString(final boolean sketchSummary, final boolean dataDetail, final ItemsSketch<T> sketch) {
  final StringBuilder sb = new StringBuilder();
  final String thisSimpleName = sketch.getClass().getSimpleName();
  final int bbCount = sketch.getBaseBufferCount();
  final int combAllocCount = sketch.getCombinedBufferAllocatedCount();
  final int k = sketch.getK();
  final long bitPattern = sketch.getBitPattern();
  if (dataDetail) {
    sb.append(ClassicUtil.LS).append("### ").append(thisSimpleName).append(" DATA DETAIL: ").append(ClassicUtil.LS);
    final Object[] items = sketch.getCombinedBuffer();
    //output the base buffer
    sb.append(" BaseBuffer :");
    if (bbCount > 0) {
      for (int i = 0; i < bbCount; i++) {
        sb.append(' ').append(items[i]);
      }
    }
    sb.append(ClassicUtil.LS);
    //output all the levels
    final int numItems = combAllocCount;
    if (numItems > (2 * k)) {
      sb.append(" Valid | Level");
      for (int j = 2 * k; j < numItems; j++) { //output level data starting at 2K
        if ((j % k) == 0) { //start output of new level
          final int levelNum = j > (2 * k) ? (j - (2 * k)) / k : 0;
          // a level is valid (T) when its bit is set in the sketch's bit pattern
          final String validLvl = ((1L << levelNum) & bitPattern) > 0 ? " T " : " F ";
          final String lvl = String.format("%5d", levelNum);
          sb.append(ClassicUtil.LS).append(" ").append(validLvl).append(" ").append(lvl).append(":");
        }
        sb.append(' ').append(items[j]);
      }
      sb.append(ClassicUtil.LS);
    }
    sb.append("### END DATA DETAIL").append(ClassicUtil.LS);
  }
  if (sketchSummary) {
    final long n = sketch.getN();
    final String nStr = String.format("%,d", n);
    final int numLevels = ClassicUtil.computeNumLevelsNeeded(k, n);
    final String bufCntStr = String.format("%,d", combAllocCount);
    // an empty sketch serializes with a short (one long) preamble
    final int preBytes = sketch.isEmpty() ? Long.BYTES : 2 * Long.BYTES;
    final double epsPmf = ClassicUtil.getNormalizedRankError(k, true);
    final String epsPmfPctStr = String.format("%.3f%%", epsPmf * 100.0);
    final double eps = ClassicUtil.getNormalizedRankError(k, false);
    final String epsPctStr = String.format("%.3f%%", eps * 100.0);
    final int numSamples = sketch.getNumRetained();
    final String numSampStr = String.format("%,d", numSamples);
    final T minItem = sketch.isEmpty() ? null : sketch.getMinItem();
    final T maxItem = sketch.isEmpty() ? null : sketch.getMaxItem();
    sb.append(ClassicUtil.LS).append("### ").append(thisSimpleName).append(" SUMMARY: ").append(ClassicUtil.LS);
    sb.append(" K : ").append(k).append(ClassicUtil.LS);
    sb.append(" N : ").append(nStr).append(ClassicUtil.LS);
    sb.append(" BaseBufferCount : ").append(bbCount).append(ClassicUtil.LS);
    sb.append(" CombinedBufferAllocatedCount : ").append(bufCntStr).append(ClassicUtil.LS);
    sb.append(" Total Levels : ").append(numLevels).append(ClassicUtil.LS);
    sb.append(" Valid Levels : ").append(ClassicUtil.computeValidLevels(bitPattern))
        .append(ClassicUtil.LS);
    sb.append(" Level Bit Pattern : ").append(Long.toBinaryString(bitPattern))
        .append(ClassicUtil.LS);
    sb.append(" Valid Samples : ").append(numSampStr).append(ClassicUtil.LS);
    sb.append(" Preamble Bytes : ").append(preBytes).append(ClassicUtil.LS);
    // consistency fix: these two lines previously used a bare LS while every
    // other line in this method qualifies it as ClassicUtil.LS
    sb.append(" Normalized Rank Error : ").append(epsPctStr).append(ClassicUtil.LS);
    sb.append(" Normalized Rank Error (PMF) : ").append(epsPmfPctStr).append(ClassicUtil.LS);
    sb.append(" Min Quantile : ").append(minItem).append(ClassicUtil.LS);
    sb.append(" Max Quantile : ").append(maxItem).append(ClassicUtil.LS);
    sb.append("### END SKETCH SUMMARY").append(ClassicUtil.LS);
  }
  return sb.toString();
}
}
| 2,828 |
0 | Create_ds/fineract-cn-postgresql/src/test/java/org/apache/fineract/cn/postgresql | Create_ds/fineract-cn-postgresql/src/test/java/org/apache/fineract/cn/postgresql/util/LocalDateConverterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.postgresql.util;
import org.junit.Assert;
import org.junit.Test;
import java.sql.Date;
import java.time.LocalDate;
/** Unit test for {@link LocalDateConverter}: a value must survive a round trip. */
public class LocalDateConverterTest {

  public LocalDateConverterTest() {
    super();
  }

  @Test
  public void shouldConvertLocalDate() {
    final LocalDateConverter converter = new LocalDateConverter();
    final LocalDate original = LocalDate.of(2017, 1, 1);
    final Date column = converter.convertToDatabaseColumn(original);
    final LocalDate roundTripped = converter.convertToEntityAttribute(column);
    Assert.assertEquals(original, roundTripped);
  }
}
| 2,829 |
0 | Create_ds/fineract-cn-postgresql/src/test/java/org/apache/fineract/cn/postgresql | Create_ds/fineract-cn-postgresql/src/test/java/org/apache/fineract/cn/postgresql/util/JdbcUrlBuilderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.postgresql.util;
import org.junit.Assert;
import org.junit.Test;
/*
* PostgreSQL URL Formats
* https://jdbc.postgresql.org/documentation/head/connect.html
*/
/** Unit tests for {@link JdbcUrlBuilder} covering single-host, no-instance, and replication URLs. */
public class JdbcUrlBuilderTest {
// explicit no-arg constructor, kept for stylistic consistency with the other test classes
public JdbcUrlBuilderTest() {
super();
}
/** A single host with an instance name yields host:port/instance. */
@Test
public void shouldCreatePostgresqlUrl() {
  final String actual = JdbcUrlBuilder
      .create(JdbcUrlBuilder.DatabaseType.POSTGRESQL)
      .host("localhost")
      .port("5432")
      .instanceName("comp_test")
      .build();
  Assert.assertEquals("jdbc:postgresql://localhost:5432/comp_test", actual);
}
/** Omitting the instance name produces a URL without the trailing /instance segment. */
@Test
public void shouldCreatePostgresqlUrlNoInstance() {
  final String actual = JdbcUrlBuilder
      .create(JdbcUrlBuilder.DatabaseType.POSTGRESQL)
      .host("localhost")
      .port("5432")
      .build();
  Assert.assertEquals("jdbc:postgresql://localhost:5432", actual);
}
/** A comma-separated host list must produce a replication URL listing every host:port pair. */
@Test
public void shouldCreatePostgresqlReplicationUrl() {
  final String expectedJdbcUrl = "jdbc:postgresql:replication://localhost:5432,anotherhost:5432/comp_test";
  // fixed copy-paste leftover: the local was named mariaDbJdbcUrl although this
  // test exercises the PostgreSQL URL builder
  final String postgresqlJdbcUrl = JdbcUrlBuilder
      .create(JdbcUrlBuilder.DatabaseType.POSTGRESQL)
      .host("localhost, anotherhost")
      .port("5432")
      .instanceName("comp_test")
      .build();
  Assert.assertEquals(expectedJdbcUrl, postgresqlJdbcUrl);
}
} | 2,830 |
0 | Create_ds/fineract-cn-postgresql/src/main/java/org/apache/fineract/cn/postgresql | Create_ds/fineract-cn-postgresql/src/main/java/org/apache/fineract/cn/postgresql/util/PostgreSQLConstants.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.postgresql.util;
/**
 * Property keys and their defaults for the PostgreSQL/BoneCP configuration.
 * Consumed by PostgreSQLJavaConfiguration when building the meta data source.
 */
public interface PostgreSQLConstants {
// name of the shared SLF4J logger bean
String LOGGER_NAME = "postgresql-logger";
// JDBC driver class (property key + default)
String POSTGRESQL_DRIVER_CLASS_PROP = "postgresql.driverClass";
String POSTGRESQL_DRIVER_CLASS_DEFAULT = "org.postgresql.Driver";
// connection coordinates: database, host, port, credentials
String POSTGRESQL_DATABASE_NAME_PROP = "postgresql.database";
String POSTGRESQL_DATABASE_NAME_DEFAULT = "seshat";
// NOTE(review): not referenced by the visible configuration code, which uses the
// PROP/DEFAULT pair above — presumably the PostgreSQL maintenance DB name; confirm before removing
String POSTGRESQL_DATABASE_NAME = "postgres";
String POSTGRESQL_HOST_PROP = "postgresql.host";
String POSTGRESQL_HOST_DEFAULT = "localhost";
String POSTGRESQL_PORT_PROP = "postgresql.port";
String POSTGRESQL_PORT_DEFAULT = "5432";
String POSTGRESQL_USER_PROP = "postgresql.user";
String POSTGRESQL_USER_DEFAULT = "postgres";
String POSTGRESQL_PASSWORD_PROP = "postgresql.password";
String POSTGRESQL_PASSWORD_DEFAULT = "postgres";
// BoneCP connection-pool tuning (all overridable via the bonecp.* properties)
String BONECP_IDLE_MAX_AGE_PROP = "bonecp.idleMaxAgeInMinutes";
String BONECP_IDLE_MAX_AGE_DEFAULT = "240";
String BONECP_IDLE_CONNECTION_TEST_PROP = "bonecp.idleConnectionTestPeriodInMinutes";
String BONECP_IDLE_CONNECTION_TEST_DEFAULT = "60";
String BONECP_MAX_CONNECTION_PARTITION_PROP = "bonecp.maxConnectionsPerPartition";
String BONECP_MAX_CONNECTION_PARTITION_DEFAULT = "16";
String BONECP_MIN_CONNECTION_PARTITION_PROP = "bonecp.minConnectionsPerPartition";
String BONECP_MIN_CONNECTION_PARTITION_DEFAULT = "4";
String BONECP_PARTITION_COUNT_PROP = "bonecp.partitionCount";
String BONECP_PARTITION_COUNT_DEFAULT = "2";
String BONECP_ACQUIRE_INCREMENT_PROP = "bonecp.acquireIncrement";
String BONECP_ACQUIRE_INCREMENT_DEFAULT = "4";
String BONECP_STATEMENT_CACHE_PROP = "bonecp.statementsCacheSize";
String BONECP_STATEMENT_CACHE_DEFAULT = "128";
}
| 2,831 |
0 | Create_ds/fineract-cn-postgresql/src/main/java/org/apache/fineract/cn/postgresql | Create_ds/fineract-cn-postgresql/src/main/java/org/apache/fineract/cn/postgresql/util/LocalDateConverter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.postgresql.util;
import javax.persistence.AttributeConverter;
import javax.persistence.Converter;
import java.sql.Date;
import java.time.LocalDate;
/**
 * JPA attribute converter between {@link LocalDate} entity attributes and
 * {@link java.sql.Date} database columns. Null-safe in both directions.
 */
@Converter
public class LocalDateConverter implements AttributeConverter<LocalDate, Date> {

  public LocalDateConverter() {
    super();
  }

  /** Maps a {@code LocalDate} to a SQL {@code Date}; null maps to null. */
  @Override
  public Date convertToDatabaseColumn(final LocalDate attribute) {
    return attribute == null ? null : Date.valueOf(attribute);
  }

  /** Maps a SQL {@code Date} back to a {@code LocalDate}; null maps to null. */
  @Override
  public LocalDate convertToEntityAttribute(final Date dbData) {
    return dbData == null ? null : dbData.toLocalDate();
  }
}
| 2,832 |
0 | Create_ds/fineract-cn-postgresql/src/main/java/org/apache/fineract/cn/postgresql | Create_ds/fineract-cn-postgresql/src/main/java/org/apache/fineract/cn/postgresql/util/EclipseLinkConstants.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.postgresql.util;
/** Property key and default controlling EclipseLink SQL logging (see EclipseLinkJpaConfiguration). */
public interface EclipseLinkConstants {
String ECLIPSE_LINK_SHOW_SQL = "eclipseLink.showSql";
String ECLIPSE_LINK_SHOW_SQL_DEFAULT = "false"; // SQL logging is off by default
}
| 2,833 |
0 | Create_ds/fineract-cn-postgresql/src/main/java/org/apache/fineract/cn/postgresql | Create_ds/fineract-cn-postgresql/src/main/java/org/apache/fineract/cn/postgresql/util/LocalDateTimeConverter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.postgresql.util;
import javax.persistence.AttributeConverter;
import javax.persistence.Converter;
import java.sql.Timestamp;
import java.time.LocalDateTime;
import org.apache.fineract.cn.lang.DateConverter;
/**
 * JPA attribute converter between {@link LocalDateTime} entity attributes and
 * {@link java.sql.Timestamp} database columns, using epoch-millis conversion via
 * the fineract {@code DateConverter}. Null-safe in both directions.
 */
@Converter
public class LocalDateTimeConverter implements AttributeConverter<LocalDateTime, Timestamp> {

  public LocalDateTimeConverter() {
    super();
  }

  /** Maps a {@code LocalDateTime} to a {@code Timestamp}; null maps to null. */
  @Override
  public Timestamp convertToDatabaseColumn(final LocalDateTime attribute) {
    return attribute == null ? null : new Timestamp(DateConverter.toEpochMillis(attribute));
  }

  /** Maps a {@code Timestamp} back to a {@code LocalDateTime}; null maps to null. */
  @Override
  public LocalDateTime convertToEntityAttribute(final Timestamp dbData) {
    return dbData == null ? null : DateConverter.fromEpochMillis(dbData.getTime());
  }
}
| 2,834 |
0 | Create_ds/fineract-cn-postgresql/src/main/java/org/apache/fineract/cn/postgresql | Create_ds/fineract-cn-postgresql/src/main/java/org/apache/fineract/cn/postgresql/util/JdbcUrlBuilder.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.postgresql.util;
/**
 * Fluent builder for JDBC connection URLs.
 * Supplying more than one comma-separated host produces a replication URL
 * (see https://jdbc.postgresql.org/documentation/head/connect.html).
 */
public final class JdbcUrlBuilder {
  private final DatabaseType type;
  private String host;
  private String port;
  private String instanceName;

  private JdbcUrlBuilder(final DatabaseType type) {
    super();
    this.type = type;
  }

  /** Starts a builder for the given database type. */
  public static JdbcUrlBuilder create(final DatabaseType type) {
    return new JdbcUrlBuilder(type);
  }

  /** Sets the host, or a comma-separated list of hosts for replication. */
  public JdbcUrlBuilder host(final String host) {
    this.host = host;
    return this;
  }

  /** Sets the port, applied to every host in the list. */
  public JdbcUrlBuilder port(final String port) {
    this.port = port;
    return this;
  }

  /** Sets the optional database (instance) name appended as /name. */
  public JdbcUrlBuilder instanceName(final String instanceName) {
    this.instanceName = instanceName;
    return this;
  }

  /** Assembles the URL from the configured parts. */
  public String build() {
    switch (this.type) {
      case POSTGRESQL:
        final String[] hosts = this.host.split(",");
        // multiple hosts switch the URL into replication form
        final StringBuilder url = new StringBuilder(this.type.getSubProtocol())
            .append(hosts.length > 1 ? "replication://" : "//");
        for (int i = 0; i < hosts.length; i++) {
          if (i > 0) {
            url.append(",");
          }
          url.append(hosts[i].trim()).append(":").append(this.port);
        }
        if (this.instanceName != null) {
          url.append("/").append(this.instanceName);
        }
        return url.toString();
      default:
        throw new IllegalArgumentException("Unknown database type '" + this.type.name() + "'");
    }
  }

  public enum DatabaseType {
    POSTGRESQL("jdbc:postgresql:");

    private final String subProtocol;

    DatabaseType(final String subProtocol) {
      this.subProtocol = subProtocol;
    }

    String getSubProtocol() {
      return this.subProtocol;
    }
  }
}
| 2,835 |
0 | Create_ds/fineract-cn-postgresql/src/main/java/org/apache/fineract/cn/postgresql | Create_ds/fineract-cn-postgresql/src/main/java/org/apache/fineract/cn/postgresql/config/MetaDataSourceWrapper.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.postgresql.config;
import com.jolbox.bonecp.BoneCPDataSource;
/**
* @author Isaac Kamga
*/
public class MetaDataSourceWrapper {
// the BoneCP pool for the meta (service) database, built by PostgreSQLJavaConfiguration
private final BoneCPDataSource metaDataSource;
public MetaDataSourceWrapper(final BoneCPDataSource metaDataSource) {
this.metaDataSource = metaDataSource;
}
// package-private on purpose: only the configuration classes in this package unwrap the pool
BoneCPDataSource getMetaDataSource() {
return metaDataSource;
}
} | 2,836 |
0 | Create_ds/fineract-cn-postgresql/src/main/java/org/apache/fineract/cn/postgresql | Create_ds/fineract-cn-postgresql/src/main/java/org/apache/fineract/cn/postgresql/config/PostgreSQLTenantFreeJavaConfiguration.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.postgresql.config;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import javax.sql.DataSource;
/**
* @author Isaac Kamga
*/
@SuppressWarnings("WeakerAccess")
@Configuration
@ConditionalOnProperty(prefix = "postgresql", name = "enabled", matchIfMissing = true)
public class PostgreSQLTenantFreeJavaConfiguration {
// exposes the meta pool directly as the application DataSource — no tenant-aware
// routing (selected by the import selector when forTenantContext is false)
@Bean
public DataSource dataSource(final MetaDataSourceWrapper metaDataSource) {
return metaDataSource.getMetaDataSource();
}
} | 2,837 |
0 | Create_ds/fineract-cn-postgresql/src/main/java/org/apache/fineract/cn/postgresql | Create_ds/fineract-cn-postgresql/src/main/java/org/apache/fineract/cn/postgresql/config/EnablePostgreSQL.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.postgresql.config;
import org.springframework.context.annotation.Import;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Enables the PostgreSQL configuration for a service by importing the
 * appropriate configuration classes via PostgreSQLJavaConfigurationImportSelector.
 */
@SuppressWarnings({"WeakerAccess", "unused"})
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@Documented
@Inherited
@Import({PostgreSQLJavaConfigurationImportSelector.class})
public @interface EnablePostgreSQL {
// true (default): import the tenant-based data-source configuration;
// false: import the tenant-free configuration instead
boolean forTenantContext() default true;
}
| 2,838 |
0 | Create_ds/fineract-cn-postgresql/src/main/java/org/apache/fineract/cn/postgresql | Create_ds/fineract-cn-postgresql/src/main/java/org/apache/fineract/cn/postgresql/config/PostgreSQLJavaConfiguration.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.postgresql.config;
import com.jolbox.bonecp.BoneCPDataSource;
import org.apache.fineract.cn.postgresql.domain.FlywayFactoryBean;
import org.apache.fineract.cn.postgresql.util.JdbcUrlBuilder;
import org.apache.fineract.cn.lang.ApplicationName;
import org.apache.fineract.cn.lang.config.EnableApplicationName;
import org.apache.fineract.cn.postgresql.util.PostgreSQLConstants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.orm.jpa.EntityManagerFactoryBuilder;
import org.springframework.context.annotation.*;
import org.springframework.core.env.Environment;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.orm.jpa.vendor.AbstractJpaVendorAdapter;
import javax.sql.DataSource;
import java.util.Properties;
/**
 * Core PostgreSQL configuration: exposes the shared logger, the Flyway factory,
 * and the BoneCP-backed meta data source, all driven by environment properties
 * with defaults from PostgreSQLConstants. Active unless postgresql.enabled=false.
 */
@SuppressWarnings("WeakerAccess")
@Configuration
@ConditionalOnProperty(prefix = "postgresql", name = "enabled", matchIfMissing = true)
@EnableApplicationName
@Import(EclipseLinkJpaConfiguration.class)
public class PostgreSQLJavaConfiguration {
// Spring environment used to resolve all postgresql.* and bonecp.* properties
private final Environment env;
@Autowired
protected PostgreSQLJavaConfiguration(Environment env) {
super();
this.env = env;
}
// shared logger bean, retrievable by name via PostgreSQLConstants.LOGGER_NAME
@Bean(name = PostgreSQLConstants.LOGGER_NAME)
public Logger logger() {
return LoggerFactory.getLogger(PostgreSQLConstants.LOGGER_NAME);
}
// factory bean producing Flyway migration instances scoped to this application
@Bean
public FlywayFactoryBean flywayFactoryBean(final ApplicationName applicationName) {
return new FlywayFactoryBean(applicationName);
}
/**
 * Builds the BoneCP pool for the meta database from environment properties,
 * falling back to the defaults declared in PostgreSQLConstants.
 */
@Bean
public MetaDataSourceWrapper metaDataSourceWrapper() {
final BoneCPDataSource boneCPDataSource = new BoneCPDataSource();
boneCPDataSource.setDriverClass(
this.env.getProperty(PostgreSQLConstants.POSTGRESQL_DRIVER_CLASS_PROP, PostgreSQLConstants.POSTGRESQL_DRIVER_CLASS_DEFAULT));
// JDBC URL assembled from the host/port/database properties
boneCPDataSource.setJdbcUrl(JdbcUrlBuilder
.create(JdbcUrlBuilder.DatabaseType.POSTGRESQL)
.host(this.env.getProperty(PostgreSQLConstants.POSTGRESQL_HOST_PROP, PostgreSQLConstants.POSTGRESQL_HOST_DEFAULT))
.port(this.env.getProperty(PostgreSQLConstants.POSTGRESQL_PORT_PROP, PostgreSQLConstants.POSTGRESQL_PORT_DEFAULT))
.instanceName(this.env.getProperty(PostgreSQLConstants.POSTGRESQL_DATABASE_NAME_PROP, PostgreSQLConstants.POSTGRESQL_DATABASE_NAME_DEFAULT))
.build());
boneCPDataSource.setUsername(
this.env.getProperty(PostgreSQLConstants.POSTGRESQL_USER_PROP, PostgreSQLConstants.POSTGRESQL_USER_DEFAULT));
boneCPDataSource.setPassword(
this.env.getProperty(PostgreSQLConstants.POSTGRESQL_PASSWORD_PROP, PostgreSQLConstants.POSTGRESQL_PASSWORD_DEFAULT));
// pool tuning: idle checks, partition sizing, acquire increment, statement cache
boneCPDataSource.setIdleConnectionTestPeriodInMinutes(
Long.valueOf(this.env.getProperty(PostgreSQLConstants.BONECP_IDLE_CONNECTION_TEST_PROP, PostgreSQLConstants.BONECP_IDLE_CONNECTION_TEST_DEFAULT)));
boneCPDataSource.setIdleMaxAgeInMinutes(
Long.valueOf(this.env.getProperty(PostgreSQLConstants.BONECP_IDLE_MAX_AGE_PROP, PostgreSQLConstants.BONECP_IDLE_MAX_AGE_DEFAULT)));
boneCPDataSource.setMaxConnectionsPerPartition(
Integer.valueOf(this.env.getProperty(PostgreSQLConstants.BONECP_MAX_CONNECTION_PARTITION_PROP, PostgreSQLConstants.BONECP_MAX_CONNECTION_PARTITION_DEFAULT)));
boneCPDataSource.setMinConnectionsPerPartition(
Integer.valueOf(this.env.getProperty(PostgreSQLConstants.BONECP_MIN_CONNECTION_PARTITION_PROP, PostgreSQLConstants.BONECP_MIN_CONNECTION_PARTITION_DEFAULT)));
boneCPDataSource.setPartitionCount(
Integer.valueOf(this.env.getProperty(PostgreSQLConstants.BONECP_PARTITION_COUNT_PROP, PostgreSQLConstants.BONECP_PARTITION_COUNT_DEFAULT)));
boneCPDataSource.setAcquireIncrement(
Integer.valueOf(this.env.getProperty(PostgreSQLConstants.BONECP_ACQUIRE_INCREMENT_PROP, PostgreSQLConstants.BONECP_ACQUIRE_INCREMENT_DEFAULT)));
boneCPDataSource.setStatementsCacheSize(
Integer.valueOf(this.env.getProperty(PostgreSQLConstants.BONECP_STATEMENT_CACHE_PROP, PostgreSQLConstants.BONECP_STATEMENT_CACHE_DEFAULT)));
// server-side prepared statements explicitly disabled for the driver
final Properties driverProperties = new Properties();
driverProperties.setProperty("useServerPrepStmts", "false");
boneCPDataSource.setDriverProperties(driverProperties);
return new MetaDataSourceWrapper(boneCPDataSource);
}
}
| 2,839 |
0 | Create_ds/fineract-cn-postgresql/src/main/java/org/apache/fineract/cn/postgresql | Create_ds/fineract-cn-postgresql/src/main/java/org/apache/fineract/cn/postgresql/config/EclipseLinkJpaConfiguration.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.postgresql.config;
import org.apache.fineract.cn.postgresql.util.EclipseLinkConstants;
import org.eclipse.persistence.config.BatchWriting;
import org.eclipse.persistence.config.PersistenceUnitProperties;
import org.eclipse.persistence.config.TargetDatabase;
import org.eclipse.persistence.logging.SessionLog;
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.domain.EntityScan;
import org.springframework.boot.autoconfigure.orm.jpa.JpaBaseConfiguration;
import org.springframework.boot.autoconfigure.orm.jpa.JpaProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.dao.annotation.PersistenceExceptionTranslationPostProcessor;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.vendor.AbstractJpaVendorAdapter;
import org.springframework.orm.jpa.vendor.Database;
import org.springframework.orm.jpa.vendor.EclipseLinkJpaVendorAdapter;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import org.springframework.transaction.jta.JtaTransactionManager;
import javax.persistence.EntityManagerFactory;
import javax.sql.DataSource;
import java.util.HashMap;
import java.util.Map;
/**
* @author Ebenezer Graham
*/
/**
 * Configures EclipseLink as the JPA provider targeting PostgreSQL: vendor
 * adapter, transaction manager, exception translation, and vendor properties.
 */
@Configuration
@EnableTransactionManagement
@EntityScan({
"org.apache.fineract.cn.**.repository",
"org.apache.fineract.cn.postgresql.util"
})
public class EclipseLinkJpaConfiguration extends JpaBaseConfiguration {
// resolved from the eclipseLink.showSql property (default false)
@Value("#{new Boolean(${" + EclipseLinkConstants.ECLIPSE_LINK_SHOW_SQL + ":" + EclipseLinkConstants.ECLIPSE_LINK_SHOW_SQL_DEFAULT + "})}")
private Boolean eclipseLinkShowSql;
protected EclipseLinkJpaConfiguration(DataSource dataSource, JpaProperties properties, ObjectProvider<JtaTransactionManager> jtaTransactionManagerProvider) {
super(dataSource, properties, jtaTransactionManagerProvider);
}
// JPA-backed transaction manager bound to the entity manager factory
@Bean
public PlatformTransactionManager transactionManager(EntityManagerFactory emf) {
final JpaTransactionManager transactionManager = new JpaTransactionManager();
transactionManager.setEntityManagerFactory(emf);
return transactionManager;
}
// translates persistence exceptions into Spring's DataAccessException hierarchy
@Bean
public PersistenceExceptionTranslationPostProcessor exceptionTranslation() {
return new PersistenceExceptionTranslationPostProcessor();
}
// EclipseLink vendor adapter for the PostgreSQL platform; DDL generation disabled
@Bean
protected AbstractJpaVendorAdapter createJpaVendorAdapter() {
EclipseLinkJpaVendorAdapter vendorAdapter = new EclipseLinkJpaVendorAdapter();
vendorAdapter.setDatabasePlatform("org.eclipse.persistence.platform.database.PostgreSQLPlatform");
vendorAdapter.setShowSql(eclipseLinkShowSql);
vendorAdapter.setDatabase(Database.POSTGRESQL);
vendorAdapter.setGenerateDdl(false);
return vendorAdapter;
}
// vendor properties: static weaving, JDBC batch writing, and verbose logging
@Bean
protected Map<String, Object> getVendorProperties() {
HashMap<String, Object> properties = new HashMap<>();
properties.put(PersistenceUnitProperties.WEAVING, "static");
properties.put(PersistenceUnitProperties.WEAVING_EAGER, "true");
properties.put(PersistenceUnitProperties.TARGET_DATABASE, TargetDatabase.PostgreSQL);
properties.put(PersistenceUnitProperties.BATCH_WRITING, BatchWriting.JDBC);
properties.put(PersistenceUnitProperties.LOGGING_LEVEL, SessionLog.ALL_LABEL);// Todo: Reduce log level after test
properties.put(PersistenceUnitProperties.LOGGING_PARAMETERS, "true");
return properties;
}
}
| 2,840 |
0 | Create_ds/fineract-cn-postgresql/src/main/java/org/apache/fineract/cn/postgresql | Create_ds/fineract-cn-postgresql/src/main/java/org/apache/fineract/cn/postgresql/config/PostgreSQLJavaConfigurationImportSelector.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.postgresql.config;
import org.springframework.context.annotation.ImportSelector;
import org.springframework.core.type.AnnotationMetadata;
import java.util.HashSet;
import java.util.Set;
/**
* @author Isaac Kamga
*/
class PostgreSQLJavaConfigurationImportSelector implements ImportSelector {
@Override
public String[] selectImports(AnnotationMetadata importingClassMetadata) {
final boolean forTenantContext = (boolean)importingClassMetadata
.getAnnotationAttributes(EnablePostgreSQL.class.getTypeName())
.get("forTenantContext");
final Set<Class> classesToImport = new HashSet<>();
final String prop = System.getProperty("postgresql.enabled");
if (prop == null || "true".equals(prop)) {
classesToImport.add(PostgreSQLJavaConfiguration.class);
if (forTenantContext) {
classesToImport.add(PostgreSQLTenantBasedJavaConfiguration.class);
}
else {
classesToImport.add(PostgreSQLTenantFreeJavaConfiguration.class);
}
}
return classesToImport.stream().map(Class::getCanonicalName).toArray(String[]::new);
}
} | 2,841 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.postgresql.config;
import org.apache.fineract.cn.postgresql.domain.ContextAwareRoutingDataSource;
import org.apache.fineract.cn.postgresql.util.JdbcUrlBuilder;
import org.apache.fineract.cn.postgresql.util.PostgreSQLConstants;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import javax.sql.DataSource;
import java.util.HashMap;
/**
 * Supplies the tenant-aware routing {@link DataSource}: connections are
 * routed to per-tenant pools at runtime by {@link ContextAwareRoutingDataSource}.
 */
@SuppressWarnings("WeakerAccess")
@Configuration
@ConditionalOnProperty(prefix = "postgresql", name = "enabled", matchIfMissing = true)
public class PostgreSQLTenantBasedJavaConfiguration {
  @Bean
  public DataSource dataSource(@Qualifier(PostgreSQLConstants.LOGGER_NAME) final Logger logger,
                               final MetaDataSourceWrapper metaDataSource) {
    final ContextAwareRoutingDataSource routingDataSource =
        new ContextAwareRoutingDataSource(logger, JdbcUrlBuilder.DatabaseType.POSTGRESQL);
    routingDataSource.setMetaDataSource(metaDataSource.getMetaDataSource());
    // Tenant data sources are created lazily, so the initial mapping is empty.
    final HashMap<Object, Object> initialTargets = new HashMap<>();
    routingDataSource.setTargetDataSources(initialTargets);
    return routingDataSource;
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.postgresql.domain;
import com.jolbox.bonecp.BoneCPDataSource;
import org.apache.fineract.cn.postgresql.util.JdbcUrlBuilder;
import org.apache.fineract.cn.postgresql.util.PostgreSQLConstants;
import org.apache.fineract.cn.lang.TenantContextHolder;
import org.apache.fineract.cn.postgresql.util.JdbcUrlBuilder.DatabaseType;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.concurrent.ConcurrentHashMap;
/**
 * A routing {@link DataSource} that picks the target data source from the
 * tenant identifier bound to the current context (see
 * {@link TenantContextHolder}). Per-tenant connection pools are built lazily
 * from the {@code tenants} table of the meta database; the meta data source
 * itself serves as the fallback when no tenant context is present.
 */
public final class ContextAwareRoutingDataSource extends AbstractRoutingDataSource {

  private final Logger logger;
  private final DatabaseType type;
  // Lazily populated cache of per-tenant pools, keyed by tenant identifier.
  private final ConcurrentHashMap<String, DataSource> dynamicDataSources;
  private DataSource metaDataSource;

  public ContextAwareRoutingDataSource(@Qualifier(PostgreSQLConstants.LOGGER_NAME) final Logger logger,
                                       final DatabaseType type) {
    super();
    this.logger = logger;
    this.type = type;
    this.dynamicDataSources = new ConcurrentHashMap<>();
  }

  public void setMetaDataSource(final DataSource metaDataSource) {
    this.metaDataSource = metaDataSource;
    // The meta data source doubles as the default target when no tenant is set.
    super.setDefaultTargetDataSource(metaDataSource);
  }

  @Override
  protected Object determineCurrentLookupKey() {
    return TenantContextHolder.identifier().orElse(null);
  }

  @Override
  protected DataSource determineTargetDataSource() {
    if (!TenantContextHolder.identifier().isPresent()) {
      this.logger.warn("Tenant context not available.");
      return super.determineTargetDataSource();
    }
    final String currentLookupKey = this.determineCurrentLookupKey().toString();
    // computeIfAbsent creates the pool at most once per tenant and returns
    // the cached value, so no second map lookup is needed.
    return this.dynamicDataSources.computeIfAbsent(currentLookupKey, this::createTenantDataSource);
  }

  // Builds a BoneCP pool for the given tenant, mirroring the pool sizing of
  // the meta data source.
  private DataSource createTenantDataSource(final String key) {
    this.logger.info("Creating new dynamic data source for {}.", key);
    final Tenant tenant = new Tenant(key);
    this.readAdditionalTenantInformation(tenant);
    final BoneCPDataSource tenantDataSource = new BoneCPDataSource();
    tenantDataSource.setDriverClass(tenant.getDriverClass());
    tenantDataSource.setJdbcUrl(JdbcUrlBuilder
        .create(this.type)
        .host(tenant.getHost())
        .port(tenant.getPort())
        .instanceName(tenant.getDatabaseName())
        .build());
    tenantDataSource.setUsername(tenant.getUser());
    tenantDataSource.setPassword(tenant.getPassword());
    // Copy the pool configuration from the meta data source.
    final BoneCPDataSource boneCpMetaDataSource = (BoneCPDataSource) this.metaDataSource;
    tenantDataSource.setIdleConnectionTestPeriodInMinutes(boneCpMetaDataSource.getIdleConnectionTestPeriodInMinutes());
    tenantDataSource.setIdleMaxAgeInMinutes(boneCpMetaDataSource.getIdleMaxAgeInMinutes());
    tenantDataSource.setMaxConnectionsPerPartition(boneCpMetaDataSource.getMaxConnectionsPerPartition());
    tenantDataSource.setMinConnectionsPerPartition(boneCpMetaDataSource.getMinConnectionsPerPartition());
    tenantDataSource.setPartitionCount(boneCpMetaDataSource.getPartitionCount());
    tenantDataSource.setAcquireIncrement(boneCpMetaDataSource.getAcquireIncrement());
    tenantDataSource.setStatementsCacheSize(boneCpMetaDataSource.getStatementsCacheSize());
    return tenantDataSource;
  }

  // Loads the tenant's connection settings from the meta database's tenants
  // table and fails fast when the tenant is unknown.
  private void readAdditionalTenantInformation(final Tenant tenant) {
    this.logger.info("Reading additional information for {}.", tenant.getIdentifier());
    @SuppressWarnings({"SqlDialectInspection", "SqlNoDataSourceInspection"})
    final String query = "SELECT driver_class, database_name, host, port, a_user, pwd FROM tenants WHERE identifier = ?";
    try (final Connection connection = this.metaDataSource.getConnection();
         final PreparedStatement preparedStatement = connection.prepareStatement(query)) {
      preparedStatement.setString(1, tenant.getIdentifier());
      try (final ResultSet resultSet = preparedStatement.executeQuery()) {
        if (resultSet.next()) {
          tenant.setDriverClass(resultSet.getString("driver_class"));
          tenant.setDatabaseName(resultSet.getString("database_name"));
          tenant.setHost(resultSet.getString("host"));
          tenant.setPort(resultSet.getString("port"));
          tenant.setUser(resultSet.getString("a_user"));
          tenant.setPassword(resultSet.getString("pwd"));
        } else {
          // Previously an unknown tenant silently left all settings null and
          // failed later with an obscure NullPointerException during pool
          // construction; report the real cause instead.
          throw new IllegalArgumentException(
              "No tenant found with identifier '" + tenant.getIdentifier() + "'");
        }
      }
    } catch (final SQLException ex) {
      throw new IllegalArgumentException("Could not fetch information for tenant '" + tenant.getIdentifier() + "'", ex);
    }
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.postgresql.domain;
/**
 * Value holder for one tenant's database connection settings as read from
 * the meta database: JDBC driver class, database name, host, port, and
 * credentials. Only the identifier is immutable; the remaining fields are
 * filled in after construction.
 */
@SuppressWarnings("WeakerAccess")
public final class Tenant {

  private final String identifier;
  private String driverClass;
  private String databaseName;
  private String host;
  private String port;
  private String user;
  private String password;

  public Tenant(final String identifier) {
    super();
    this.identifier = identifier;
  }

  public String getIdentifier() {
    return this.identifier;
  }

  public String getDriverClass() {
    return this.driverClass;
  }

  public void setDriverClass(final String newDriverClass) {
    this.driverClass = newDriverClass;
  }

  public String getDatabaseName() {
    return this.databaseName;
  }

  public void setDatabaseName(final String newDatabaseName) {
    this.databaseName = newDatabaseName;
  }

  public String getHost() {
    return this.host;
  }

  public void setHost(final String newHost) {
    this.host = newHost;
  }

  public String getPort() {
    return this.port;
  }

  public void setPort(final String newPort) {
    this.port = newPort;
  }

  public String getUser() {
    return this.user;
  }

  public void setUser(final String newUser) {
    this.user = newUser;
  }

  public String getPassword() {
    return this.password;
  }

  public void setPassword(final String newPassword) {
    this.password = newPassword;
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.postgresql.domain;
import org.apache.fineract.cn.lang.ApplicationName;
import org.flywaydb.core.Flyway;
import javax.sql.DataSource;
/**
 * Creates {@link Flyway} instances pre-configured for this service's schema
 * migrations against a given data source.
 */
public class FlywayFactoryBean {

  private final ApplicationName applicationName;

  public FlywayFactoryBean(final ApplicationName applicationName) {
    super();
    this.applicationName = applicationName;
  }

  /**
   * Configures a {@link Flyway} instance for the given data source, reading
   * migrations from {@code db/migrations/postgresql} and recording them in a
   * version table derived from the service name.
   */
  public Flyway create(final DataSource dataSource) {
    final Flyway migrationRunner = new Flyway();
    migrationRunner.setDataSource(dataSource);
    migrationRunner.setLocations("db/migrations/postgresql");
    // The version table is namespaced by service name.
    migrationRunner.setTable(this.applicationName.getServiceName() + "_schema_version");
    // Baseline existing (non-empty) schemas at version 0 instead of failing.
    migrationRunner.setBaselineOnMigrate(true);
    migrationRunner.setBaselineVersionAsString("0");
    return migrationRunner;
  }
}
package com.squareup.protoparser;
import com.squareup.protoparser.DataType.MapType;
import com.squareup.protoparser.DataType.NamedType;
import org.junit.Test;
import static com.squareup.protoparser.DataType.ScalarType.ANY;
import static com.squareup.protoparser.DataType.ScalarType.BOOL;
import static com.squareup.protoparser.DataType.ScalarType.BYTES;
import static com.squareup.protoparser.DataType.ScalarType.DOUBLE;
import static com.squareup.protoparser.DataType.ScalarType.FIXED32;
import static com.squareup.protoparser.DataType.ScalarType.FIXED64;
import static com.squareup.protoparser.DataType.ScalarType.FLOAT;
import static com.squareup.protoparser.DataType.ScalarType.INT32;
import static com.squareup.protoparser.DataType.ScalarType.INT64;
import static com.squareup.protoparser.DataType.ScalarType.SFIXED32;
import static com.squareup.protoparser.DataType.ScalarType.SFIXED64;
import static com.squareup.protoparser.DataType.ScalarType.SINT32;
import static com.squareup.protoparser.DataType.ScalarType.SINT64;
import static com.squareup.protoparser.DataType.ScalarType.STRING;
import static com.squareup.protoparser.DataType.ScalarType.UINT32;
import static com.squareup.protoparser.DataType.ScalarType.UINT64;
import static org.assertj.core.api.Assertions.assertThat;
/** Tests the schema rendering of scalar, map, and named data types. */
public final class DataTypeTest {
  @Test public void scalarToString() {
    // Each scalar type renders as its lower-case protobuf keyword.
    Object[][] expected = {
        {ANY, "any"},
        {BOOL, "bool"},
        {BYTES, "bytes"},
        {DOUBLE, "double"},
        {FLOAT, "float"},
        {FIXED32, "fixed32"},
        {FIXED64, "fixed64"},
        {INT32, "int32"},
        {INT64, "int64"},
        {SFIXED32, "sfixed32"},
        {SFIXED64, "sfixed64"},
        {SINT32, "sint32"},
        {SINT64, "sint64"},
        {STRING, "string"},
        {UINT32, "uint32"},
        {UINT64, "uint64"},
    };
    for (Object[] pair : expected) {
      assertThat(pair[0].toString()).isEqualTo(pair[1]);
    }
  }

  @Test public void mapToString() {
    String rendered = MapType.create(STRING, STRING).toString();
    assertThat(rendered).isEqualTo("map<string, string>");
  }

  @Test public void namedToString() {
    assertThat(NamedType.create("test").toString()).isEqualTo("test");
    assertThat(NamedType.create("nested.nested").toString()).isEqualTo("nested.nested");
  }
}
package com.squareup.protoparser;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/** Static helpers for concisely building maps and lists in tests. */
final class TestUtils {
  /**
   * Builds an insertion-ordered map from alternating keys and values, e.g.
   * {@code map("a", 1, "b", 2)}.
   *
   * @throws IllegalArgumentException if an odd number of arguments is given
   *     (previously this surfaced as an ArrayIndexOutOfBoundsException)
   */
  static Map<String, Object> map(Object... keysAndValues) {
    if (keysAndValues.length % 2 != 0) {
      throw new IllegalArgumentException(
          "Expected alternating keys and values but got " + keysAndValues.length + " arguments.");
    }
    Map<String, Object> result = new LinkedHashMap<>();
    for (int i = 0; i < keysAndValues.length; i += 2) {
      result.put((String) keysAndValues[i], keysAndValues[i + 1]);
    }
    return result;
  }

  /** Returns a fixed-size list backed by {@code values}. */
  @SafeVarargs
  static <T> List<T> list(T... values) {
    return Arrays.asList(values);
  }

  private TestUtils() {
    throw new AssertionError("No instances.");
  }
}
package com.squareup.protoparser;
import com.squareup.protoparser.OptionElement.Kind;
import java.util.Arrays;
import java.util.Collections;
import org.junit.Test;
import static com.squareup.protoparser.ProtoFile.MAX_TAG_VALUE;
import static com.squareup.protoparser.ProtoFile.MIN_TAG_VALUE;
import static com.squareup.protoparser.ProtoFile.Syntax.PROTO_2;
import static com.squareup.protoparser.ProtoFile.isValidTag;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.fail;
/**
 * Tests for {@link ProtoFile}: builder null-checks, field tag validation,
 * and {@code toSchema()} rendering of files with imports, options, types,
 * services, and extend declarations.
 */
public class ProtoFileTest {
  // Every builder mutator must fail fast on null with a message naming the
  // offending argument; a null element inside a collection is reported with
  // the singular argument name.
  @Test public void nullBuilderValuesThrow() {
    try {
      ProtoFile.builder(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("filePath == null");
    }
    try {
      ProtoFile.builder("test.proto").packageName(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("packageName == null");
    }
    try {
      ProtoFile.builder("test.proto").syntax(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("syntax == null");
    }
    try {
      ProtoFile.builder("test.proto").addDependency(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("dependency == null");
    }
    try {
      ProtoFile.builder("test.proto").addDependencies(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("dependencies == null");
    }
    try {
      ProtoFile.builder("test.proto").addDependencies(Collections.<String>singleton(null));
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("dependency == null");
    }
    try {
      ProtoFile.builder("test.proto").addPublicDependency(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("dependency == null");
    }
    try {
      ProtoFile.builder("test.proto").addPublicDependencies(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("dependencies == null");
    }
    try {
      ProtoFile.builder("test.proto").addPublicDependencies(Collections.<String>singleton(null));
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("dependency == null");
    }
    try {
      ProtoFile.builder("test.proto").addType(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("type == null");
    }
    try {
      ProtoFile.builder("test.proto").addTypes(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("types == null");
    }
    try {
      ProtoFile.builder("test.proto").addTypes(Collections.<TypeElement>singleton(null));
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("type == null");
    }
    try {
      ProtoFile.builder("test.proto").addService(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("service == null");
    }
    try {
      ProtoFile.builder("test.proto").addServices(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("services == null");
    }
    try {
      ProtoFile.builder("test.proto").addServices(Collections.<ServiceElement>singleton(null));
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("service == null");
    }
    try {
      ProtoFile.builder("test.proto").addExtendDeclaration(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("extend == null");
    }
    try {
      ProtoFile.builder("test.proto").addExtendDeclarations(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("extendDeclarations == null");
    }
    try {
      ProtoFile.builder("test.proto").addExtendDeclarations(
          Collections.<ExtendElement>singleton(null));
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("extend == null");
    }
    try {
      ProtoFile.builder("test.proto").addOption(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("option == null");
    }
    try {
      ProtoFile.builder("test.proto").addOptions(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("options == null");
    }
    try {
      ProtoFile.builder("test.proto").addOptions(Collections.<OptionElement>singleton(null));
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("option == null");
    }
  }
  // Valid tags are [MIN_TAG_VALUE, MAX_TAG_VALUE] minus the reserved range.
  @Test public void tagValueValidation() {
    assertThat(isValidTag(MIN_TAG_VALUE - 1)).isFalse(); // Less than minimum.
    assertThat(isValidTag(MIN_TAG_VALUE)).isTrue();
    assertThat(isValidTag(1234)).isTrue();
    assertThat(isValidTag(19222)).isFalse(); // Reserved range.
    assertThat(isValidTag(2319573)).isTrue();
    assertThat(isValidTag(MAX_TAG_VALUE)).isTrue();
    assertThat(isValidTag(MAX_TAG_VALUE + 1)).isFalse(); // Greater than maximum.
  }
  // An empty file renders only the file-name banner comment.
  @Test public void emptyToSchema() {
    ProtoFile file = ProtoFile.builder("file.proto").build();
    String expected = "// file.proto\n";
    assertThat(file.toSchema()).isEqualTo(expected);
  }
  @Test public void emptyWithPackageToSchema() {
    ProtoFile file = ProtoFile.builder("file.proto").packageName("example.simple").build();
    String expected = ""
        + "// file.proto\n"
        + "package example.simple;\n";
    assertThat(file.toSchema()).isEqualTo(expected);
  }
  @Test public void simpleToSchema() {
    TypeElement element = MessageElement.builder().name("Message").build();
    ProtoFile file = ProtoFile.builder("file.proto").addType(element).build();
    String expected = ""
        + "// file.proto\n"
        + "\n"
        + "message Message {}\n";
    assertThat(file.toSchema()).isEqualTo(expected);
  }
  @Test public void simpleWithImportsToSchema() {
    TypeElement element = MessageElement.builder().name("Message").build();
    ProtoFile file =
        ProtoFile.builder("file.proto").addDependency("example.other").addType(element).build();
    String expected = ""
        + "// file.proto\n"
        + "\n"
        + "import \"example.other\";\n"
        + "\n"
        + "message Message {}\n";
    assertThat(file.toSchema()).isEqualTo(expected);
  }
  @Test public void addMultipleDependencies() {
    TypeElement element = MessageElement.builder().name("Message").build();
    ProtoFile file = ProtoFile.builder("file.proto")
        .addDependencies(Arrays.asList("example.other", "example.another"))
        .addType(element)
        .build();
    assertThat(file.dependencies()).hasSize(2);
  }
  // Public imports render with the "public" modifier.
  @Test public void simpleWithPublicImportsToSchema() {
    TypeElement element = MessageElement.builder().name("Message").build();
    ProtoFile file = ProtoFile.builder("file.proto")
        .addPublicDependency("example.other")
        .addType(element)
        .build();
    String expected = ""
        + "// file.proto\n"
        + "\n"
        + "import public \"example.other\";\n"
        + "\n"
        + "message Message {}\n";
    assertThat(file.toSchema()).isEqualTo(expected);
  }
  @Test public void addMultiplePublicDependencies() {
    TypeElement element = MessageElement.builder().name("Message").build();
    ProtoFile file = ProtoFile.builder("file.proto")
        .addPublicDependencies(Arrays.asList("example.other", "example.another"))
        .addType(element)
        .build();
    assertThat(file.publicDependencies()).hasSize(2);
  }
  // Regular imports are rendered before public imports.
  @Test public void simpleWithBothImportsToSchema() {
    TypeElement element = MessageElement.builder().name("Message").build();
    ProtoFile file = ProtoFile.builder("file.proto")
        .addDependency("example.thing")
        .addPublicDependency("example.other")
        .addType(element)
        .build();
    String expected = ""
        + "// file.proto\n"
        + "\n"
        + "import \"example.thing\";\n"
        + "import public \"example.other\";\n"
        + "\n"
        + "message Message {}\n";
    assertThat(file.toSchema()).isEqualTo(expected);
  }
  @Test public void simpleWithServicesToSchema() {
    TypeElement element = MessageElement.builder().name("Message").build();
    ServiceElement service = ServiceElement.builder().name("Service").build();
    ProtoFile file = ProtoFile.builder("file.proto").addType(element).addService(service).build();
    String expected = ""
        + "// file.proto\n"
        + "\n"
        + "message Message {}\n"
        + "\n"
        + "service Service {}\n";
    assertThat(file.toSchema()).isEqualTo(expected);
  }
  @Test public void addMultipleServices() {
    ServiceElement service1 = ServiceElement.builder().name("Service1").build();
    ServiceElement service2 = ServiceElement.builder().name("Service2").build();
    ProtoFile file = ProtoFile.builder("file.proto")
        .addServices(Arrays.asList(service1, service2))
        .build();
    assertThat(file.services()).hasSize(2);
  }
  @Test public void simpleWithOptionsToSchema() {
    TypeElement element = MessageElement.builder().name("Message").build();
    OptionElement option = OptionElement.create("kit", Kind.STRING, "kat");
    ProtoFile file = ProtoFile.builder("file.proto").addOption(option).addType(element).build();
    String expected = ""
        + "// file.proto\n"
        + "\n"
        + "option kit = \"kat\";\n"
        + "\n"
        + "message Message {}\n";
    assertThat(file.toSchema()).isEqualTo(expected);
  }
  @Test public void addMultipleOptions() {
    TypeElement element = MessageElement.builder().name("Message").build();
    OptionElement kitKat = OptionElement.create("kit", Kind.STRING, "kat");
    OptionElement fooBar = OptionElement.create("foo", Kind.STRING, "bar");
    ProtoFile file = ProtoFile.builder("file.proto")
        .addOptions(Arrays.asList(kitKat, fooBar))
        .addType(element)
        .build();
    assertThat(file.options()).hasSize(2);
  }
  @Test public void simpleWithExtendsToSchema() {
    ProtoFile file = ProtoFile.builder("file.proto")
        .addExtendDeclaration(ExtendElement.builder().name("Extend").build())
        .addType(MessageElement.builder().name("Message").build())
        .build();
    String expected = ""
        + "// file.proto\n"
        + "\n"
        + "message Message {}\n"
        + "\n"
        + "extend Extend {}\n";
    assertThat(file.toSchema()).isEqualTo(expected);
  }
  @Test public void addMultipleExtends() {
    ExtendElement extend1 = ExtendElement.builder().name("Extend1").build();
    ExtendElement extend2 = ExtendElement.builder().name("Extend2").build();
    ProtoFile file = ProtoFile.builder("file.proto")
        .addExtendDeclarations(Arrays.asList(extend1, extend2))
        .build();
    assertThat(file.extendDeclarations()).hasSize(2);
  }
  // Exercises every section at once and checks the rendering order:
  // package, imports, options, types, extends, services. Also verifies the
  // output re-parses to an equal ProtoFile (round trip).
  @Test public void multipleEverythingToSchema() {
    TypeElement element1 = MessageElement.builder()
        .name("Message1")
        .qualifiedName("example.simple.Message1")
        .build();
    TypeElement element2 = MessageElement.builder()
        .name("Message2")
        .qualifiedName("example.simple.Message2")
        .build();
    ExtendElement extend1 = ExtendElement.builder()
        .name("Extend1")
        .qualifiedName("example.simple.Extend1")
        .build();
    ExtendElement extend2 = ExtendElement.builder()
        .name("Extend2")
        .qualifiedName("example.simple.Extend2")
        .build();
    OptionElement option1 = OptionElement.create("kit", Kind.STRING, "kat");
    OptionElement option2 = OptionElement.create("foo", Kind.STRING, "bar");
    ServiceElement service1 = ServiceElement.builder()
        .name("Service1")
        .qualifiedName("example.simple.Service1")
        .build();
    ServiceElement service2 = ServiceElement.builder()
        .name("Service2")
        .qualifiedName("example.simple.Service2")
        .build();
    ProtoFile file = ProtoFile.builder("file.proto")
        .packageName("example.simple")
        .addDependency("example.thing")
        .addPublicDependency("example.other")
        .addType(element1)
        .addType(element2)
        .addService(service1)
        .addService(service2)
        .addExtendDeclaration(extend1)
        .addExtendDeclaration(extend2)
        .addOption(option1)
        .addOption(option2)
        .build();
    String expected = ""
        + "// file.proto\n"
        + "package example.simple;\n"
        + "\n"
        + "import \"example.thing\";\n"
        + "import public \"example.other\";\n"
        + "\n"
        + "option kit = \"kat\";\n"
        + "option foo = \"bar\";\n"
        + "\n"
        + "message Message1 {}\n"
        + "message Message2 {}\n"
        + "\n"
        + "extend Extend1 {}\n"
        + "extend Extend2 {}\n"
        + "\n"
        + "service Service1 {}\n"
        + "service Service2 {}\n";
    assertThat(file.toSchema()).isEqualTo(expected);
    // Re-parse the expected string into a ProtoFile and ensure they're equal.
    ProtoFile parsed = ProtoParser.parse("file.proto", expected);
    assertThat(parsed).isEqualTo(file);
  }
  @Test public void syntaxToSchema() {
    TypeElement element = MessageElement.builder().name("Message").build();
    ProtoFile file = ProtoFile.builder("file.proto").syntax(PROTO_2).addType(element).build();
    String expected = ""
        + "// file.proto\n"
        + "syntax \"proto2\";\n"
        + "\n"
        + "message Message {}\n";
    assertThat(file.toSchema()).isEqualTo(expected);
  }
}
package com.squareup.protoparser;
import java.util.Arrays;
import java.util.Collections;
import org.junit.Test;
import static com.squareup.protoparser.DataType.ScalarType.STRING;
import static com.squareup.protoparser.FieldElement.Label.REQUIRED;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.fail;
/**
 * Tests for {@link ExtendElement}: builder preconditions, qualified-name
 * defaulting, duplicate tag detection, and {@code toSchema()} rendering.
 */
public class ExtendElementTest {
  // name() is mandatory even when qualifiedName() is given.
  @Test public void nameRequired() {
    try {
      ExtendElement.builder().qualifiedName("Test").build();
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("name == null");
    }
  }
  // Setting name() alone defaults qualifiedName() to the same value.
  @Test public void nameSetsQualifiedName() {
    ExtendElement test = ExtendElement.builder().name("Test").build();
    assertThat(test.name()).isEqualTo("Test");
    assertThat(test.qualifiedName()).isEqualTo("Test");
  }
  // Every builder mutator must fail fast on null, naming the argument.
  @Test public void nullBuilderValuesThrow() {
    try {
      ExtendElement.builder().name(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("name == null");
    }
    try {
      ExtendElement.builder().qualifiedName(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("qualifiedName == null");
    }
    try {
      ExtendElement.builder().documentation(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("documentation == null");
    }
    try {
      ExtendElement.builder().addField(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("field == null");
    }
    try {
      ExtendElement.builder().addFields(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("fields == null");
    }
    try {
      ExtendElement.builder().addFields(Collections.<FieldElement>singleton(null));
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("field == null");
    }
  }
  @Test public void emptyToSchema() {
    ExtendElement extend = ExtendElement.builder().name("Name").build();
    String expected = "extend Name {}\n";
    assertThat(extend.toSchema()).isEqualTo(expected);
  }
  @Test public void simpleToSchema() {
    ExtendElement extend = ExtendElement.builder()
        .name("Name")
        .addField(FieldElement.builder().label(REQUIRED).type(STRING).name("name").tag(1).build())
        .build();
    String expected = ""
        + "extend Name {\n"
        + " required string name = 1;\n"
        + "}\n";
    assertThat(extend.toSchema()).isEqualTo(expected);
  }
  @Test public void addMultipleFields() {
    FieldElement firstName =
        FieldElement.builder().label(REQUIRED).type(STRING).name("first_name").tag(1).build();
    FieldElement lastName =
        FieldElement.builder().label(REQUIRED).type(STRING).name("last_name").tag(2).build();
    ExtendElement extend = ExtendElement.builder()
        .name("Name")
        .addFields(Arrays.asList(firstName, lastName))
        .build();
    assertThat(extend.fields()).hasSize(2);
  }
  // Documentation renders as a leading // comment.
  @Test public void simpleWithDocumentationToSchema() {
    ExtendElement extend = ExtendElement.builder()
        .name("Name")
        .documentation("Hello")
        .addField(FieldElement.builder()
            .label(REQUIRED)
            .type(STRING)
            .name("name")
            .tag(1)
            .build())
        .build();
    String expected = ""
        + "// Hello\n"
        + "extend Name {\n"
        + " required string name = 1;\n"
        + "}\n";
    assertThat(extend.toSchema()).isEqualTo(expected);
  }
  // Two fields sharing a tag are rejected at build() time, with the
  // qualified name in the error message.
  @Test public void duplicateTagValueThrows() {
    FieldElement field1 = FieldElement.builder()
        .label(REQUIRED)
        .type(STRING)
        .name("name1")
        .tag(1)
        .build();
    FieldElement field2 = FieldElement.builder()
        .label(REQUIRED)
        .type(STRING)
        .name("name2")
        .tag(1)
        .build();
    try {
      ExtendElement.builder()
          .name("Extend")
          .qualifiedName("example.Extend")
          .addField(field1)
          .addField(field2)
          .build();
      fail();
    } catch (IllegalStateException e) {
      assertThat(e).hasMessage("Duplicate tag 1 in example.Extend");
    }
  }
}
package com.squareup.protoparser;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import static com.squareup.protoparser.OptionElement.Kind.BOOLEAN;
import static com.squareup.protoparser.OptionElement.Kind.LIST;
import static com.squareup.protoparser.OptionElement.Kind.MAP;
import static com.squareup.protoparser.OptionElement.Kind.OPTION;
import static com.squareup.protoparser.OptionElement.Kind.STRING;
import static com.squareup.protoparser.TestUtils.list;
import static com.squareup.protoparser.TestUtils.map;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.data.MapEntry.entry;
import static org.junit.Assert.fail;
/** Tests for {@link OptionElement}: null validation, schema rendering, and lookup helpers. */
public class OptionElementTest {
  @Test public void nullNameThrows() {
    // The factory rejects a null name up front.
    try {
      OptionElement.create(null, STRING, "Test");
      fail();
    } catch (NullPointerException expected) {
      assertThat(expected).hasMessage("name == null");
    }
  }

  @Test public void nullValueThrows() {
    // The factory rejects a null value up front.
    try {
      OptionElement.create("test", STRING, null);
      fail();
    } catch (NullPointerException expected) {
      assertThat(expected).hasMessage("value == null");
    }
  }

  @Test public void simpleToSchema() {
    OptionElement opt = OptionElement.create("foo", STRING, "bar");
    assertThat(opt.toSchema()).isEqualTo("foo = \"bar\"");
  }

  @Test public void nestedToSchema() {
    // A parenthesized option wrapping another option renders as "(outer).inner".
    OptionElement inner = OptionElement.create("bar", STRING, "baz");
    OptionElement opt = OptionElement.create("foo.boo", OPTION, inner, true);
    assertThat(opt.toSchema()).isEqualTo("(foo.boo).bar = \"baz\"");
  }

  @Test public void listToSchema() {
    OptionElement opt = OptionElement.create("foo", LIST,
        list(OptionElement.create("ping", STRING, "pong", true),
            OptionElement.create("kit", STRING, "kat")), true);
    String schema = ""
        + "(foo) = [\n"
        + " (ping) = \"pong\",\n"
        + " kit = \"kat\"\n"
        + "]";
    assertThat(opt.toSchema()).isEqualTo(schema);
  }

  @Test public void mapToSchema() {
    OptionElement opt =
        OptionElement.create("foo", MAP, map("ping", "pong", "kit", list("kat", "kot")));
    String schema = ""
        + "foo = {\n"
        + " ping: \"pong\",\n"
        + " kit: [\n"
        + " \"kat\",\n"
        + " \"kot\"\n"
        + " ]\n"
        + "}";
    assertThat(opt.toSchema()).isEqualTo(schema);
  }

  @Test public void booleanToSchema() {
    OptionElement opt = OptionElement.create("foo", BOOLEAN, "false");
    assertThat(opt.toSchema()).isEqualTo("foo = false");
  }

  @Test public void optionListToMap() {
    // Duplicate "nested.option" entries collapse into a single map value.
    List<OptionElement> input = list(
        OptionElement.create("foo", STRING, "bar"),
        OptionElement.create("ping", LIST, list(
            OptionElement.create("kit", STRING, "kat"),
            OptionElement.create("tic", STRING, "tac"),
            OptionElement.create("up", STRING, "down"))),
        OptionElement.create("wire", MAP, map(
            "omar", "little",
            "proposition", "joe")),
        OptionElement.create("nested.option", OPTION, OptionElement.create("one", STRING, "two")),
        OptionElement.create("nested.option", OPTION,
            OptionElement.create("three", STRING, "four")));
    Map<String, Object> result = OptionElement.optionsAsMap(input);
    assertThat(result).contains(
        entry("foo", "bar"),
        entry("ping", list(
            OptionElement.create("kit", STRING, "kat"),
            OptionElement.create("tic", STRING, "tac"),
            OptionElement.create("up", STRING, "down"))),
        entry("wire", map(
            "omar", "little",
            "proposition", "joe")),
        entry("nested.option", map(
            "one", "two",
            "three", "four")));
  }

  @Test public void findInList() {
    OptionElement first = OptionElement.create("one", STRING, "1");
    OptionElement second = OptionElement.create("two", STRING, "2");
    OptionElement third = OptionElement.create("three", STRING, "3");
    List<OptionElement> haystack = list(first, second, third);
    // findByName returns the same instance, not an equal copy.
    assertThat(OptionElement.findByName(haystack, "one")).isSameAs(first);
    assertThat(OptionElement.findByName(haystack, "two")).isSameAs(second);
    assertThat(OptionElement.findByName(haystack, "three")).isSameAs(third);
  }

  @Test public void findInListMissing() {
    OptionElement first = OptionElement.create("one", STRING, "1");
    OptionElement second = OptionElement.create("two", STRING, "2");
    List<OptionElement> haystack = list(first, second);
    assertThat(OptionElement.findByName(haystack, "three")).isNull();
  }

  @Test public void findInListDuplicate() {
    // An ambiguous lookup is an error, not a first-match win.
    OptionElement first = OptionElement.create("one", STRING, "1");
    OptionElement second = OptionElement.create("two", STRING, "2");
    List<OptionElement> haystack = list(first, second, first);
    try {
      OptionElement.findByName(haystack, "one");
      fail();
    } catch (IllegalStateException e) {
      assertThat(e).hasMessage("Multiple options match name: one");
    }
  }
}
| 2,850 |
0 | Create_ds/protoparser/src/test/java/com/squareup | Create_ds/protoparser/src/test/java/com/squareup/protoparser/FieldElementTest.java | package com.squareup.protoparser;
import com.squareup.protoparser.DataType.NamedType;
import com.squareup.protoparser.OptionElement.Kind;
import java.util.Arrays;
import java.util.Collections;
import org.junit.Test;
import static com.squareup.protoparser.DataType.ScalarType.STRING;
import static com.squareup.protoparser.FieldElement.Label.OPTIONAL;
import static com.squareup.protoparser.FieldElement.Label.REQUIRED;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.fail;
/** Tests for {@link FieldElement}: option handling and builder validation. */
public final class FieldElementTest {
  /** Builds an {@code optional CType ctype = 1} field carrying the given options. */
  private static FieldElement ctypeField(OptionElement... options) {
    return FieldElement.builder()
        .label(OPTIONAL)
        .type(NamedType.create("CType"))
        .name("ctype")
        .tag(1)
        .addOptions(Arrays.asList(options))
        .build();
  }

  @Test public void field() {
    FieldElement element = ctypeField(
        OptionElement.create("default", Kind.ENUM, "TEST"),
        OptionElement.create("deprecated", Kind.BOOLEAN, "true"));
    assertThat(element.isDeprecated()).isTrue();
    assertThat(element.getDefault().value()).isEqualTo("TEST");
    assertThat(element.options()).containsOnly(
        OptionElement.create("default", Kind.ENUM, "TEST"),
        OptionElement.create("deprecated", Kind.BOOLEAN, "true"));
  }

  @Test public void deprecatedSupportStringAndBoolean() {
    // "deprecated" may be expressed as either a string or a boolean option value.
    FieldElement asString = ctypeField(OptionElement.create("deprecated", Kind.STRING, "true"));
    assertThat(asString.isDeprecated()).isTrue();
    FieldElement asBoolean = ctypeField(OptionElement.create("deprecated", Kind.BOOLEAN, "true"));
    assertThat(asBoolean.isDeprecated()).isTrue();
  }

  @Test public void packedSupportStringAndBoolean() {
    // "packed" may be expressed as either a string or a boolean option value.
    FieldElement asString = ctypeField(OptionElement.create("packed", Kind.STRING, "true"));
    assertThat(asString.isPacked()).isTrue();
    FieldElement asBoolean = ctypeField(OptionElement.create("packed", Kind.BOOLEAN, "true"));
    assertThat(asBoolean.isPacked()).isTrue();
  }

  @Test public void addMultipleOptions() {
    OptionElement kitKat = OptionElement.create("kit", Kind.STRING, "kat");
    OptionElement fooBar = OptionElement.create("foo", Kind.STRING, "bar");
    FieldElement element = FieldElement.builder()
        .label(REQUIRED)
        .type(STRING)
        .name("name")
        .tag(1)
        .addOptions(Arrays.asList(kitKat, fooBar))
        .build();
    assertThat(element.options()).hasSize(2);
  }

  @Test public void labelRequired() {
    try {
      FieldElement.builder().type(STRING).name("name").tag(1).build();
      fail();
    } catch (NullPointerException expected) {
      assertThat(expected).hasMessage("label == null");
    }
  }

  @Test public void typeRequired() {
    try {
      FieldElement.builder().label(REQUIRED).name("name").tag(1).build();
      fail();
    } catch (NullPointerException expected) {
      assertThat(expected).hasMessage("type == null");
    }
  }

  @Test public void nameRequired() {
    try {
      FieldElement.builder().label(REQUIRED).type(STRING).tag(1).build();
      fail();
    } catch (NullPointerException expected) {
      assertThat(expected).hasMessage("name == null");
    }
  }

  @Test public void tagRequired() {
    try {
      FieldElement.builder().label(REQUIRED).type(STRING).name("name").build();
      fail();
    } catch (NullPointerException expected) {
      assertThat(expected).hasMessage("tag == null");
    }
  }

  @Test public void nullBuilderValuesThrow() {
    // Builder setters reject null immediately rather than deferring to build().
    try {
      FieldElement.builder().type(null);
      fail();
    } catch (NullPointerException expected) {
      assertThat(expected).hasMessage("type == null");
    }
    try {
      FieldElement.builder().name(null);
      fail();
    } catch (NullPointerException expected) {
      assertThat(expected).hasMessage("name == null");
    }
    try {
      FieldElement.builder().documentation(null);
      fail();
    } catch (NullPointerException expected) {
      assertThat(expected).hasMessage("documentation == null");
    }
    try {
      FieldElement.builder().addOption(null);
      fail();
    } catch (NullPointerException expected) {
      assertThat(expected).hasMessage("option == null");
    }
    try {
      FieldElement.builder().addOptions(null);
      fail();
    } catch (NullPointerException expected) {
      assertThat(expected).hasMessage("options == null");
    }
    try {
      FieldElement.builder().addOptions(Collections.<OptionElement>singleton(null));
      fail();
    } catch (NullPointerException expected) {
      assertThat(expected).hasMessage("option == null");
    }
  }
}
| 2,851 |
0 | Create_ds/protoparser/src/test/java/com/squareup | Create_ds/protoparser/src/test/java/com/squareup/protoparser/EnumElementTest.java | package com.squareup.protoparser;
import java.util.Arrays;
import java.util.Collections;
import org.junit.Test;
import static com.squareup.protoparser.OptionElement.Kind.BOOLEAN;
import static com.squareup.protoparser.OptionElement.Kind.STRING;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.fail;
/** Tests for {@link EnumElement} and its constants: validation and schema rendering. */
public class EnumElementTest {
  /** Shorthand for an enum constant with no documentation or options. */
  private static EnumConstantElement constant(String name, int tag) {
    return EnumConstantElement.builder().name(name).tag(tag).build();
  }

  @Test public void nameRequired() {
    try {
      EnumElement.builder().qualifiedName("Test").build();
      fail();
    } catch (NullPointerException expected) {
      assertThat(expected).hasMessage("name == null");
    }
  }

  @Test public void nameSetsQualifiedName() {
    // Setting only the simple name also populates the qualified name.
    EnumElement element = EnumElement.builder().name("Test").build();
    assertThat(element.name()).isEqualTo("Test");
    assertThat(element.qualifiedName()).isEqualTo("Test");
  }

  @Test public void nullBuilderValuesThrow() {
    // Builder setters reject null immediately rather than deferring to build().
    try {
      EnumElement.builder().name(null);
      fail();
    } catch (NullPointerException expected) {
      assertThat(expected).hasMessage("name == null");
    }
    try {
      EnumElement.builder().qualifiedName(null);
      fail();
    } catch (NullPointerException expected) {
      assertThat(expected).hasMessage("qualifiedName == null");
    }
    try {
      EnumElement.builder().documentation(null);
      fail();
    } catch (NullPointerException expected) {
      assertThat(expected).hasMessage("documentation == null");
    }
    try {
      EnumElement.builder().addConstant(null);
      fail();
    } catch (NullPointerException expected) {
      assertThat(expected).hasMessage("constant == null");
    }
    try {
      EnumElement.builder().addConstants(null);
      fail();
    } catch (NullPointerException expected) {
      assertThat(expected).hasMessage("constants == null");
    }
    try {
      EnumElement.builder().addConstants(Collections.<EnumConstantElement>singleton(null));
      fail();
    } catch (NullPointerException expected) {
      assertThat(expected).hasMessage("constant == null");
    }
    try {
      EnumElement.builder().addOption(null);
      fail();
    } catch (NullPointerException expected) {
      assertThat(expected).hasMessage("option == null");
    }
    try {
      EnumElement.builder().addOptions(null);
      fail();
    } catch (NullPointerException expected) {
      assertThat(expected).hasMessage("options == null");
    }
    try {
      EnumElement.builder().addOptions(Collections.<OptionElement>singleton(null));
      fail();
    } catch (NullPointerException expected) {
      assertThat(expected).hasMessage("option == null");
    }
  }

  @Test public void emptyToSchema() {
    assertThat(EnumElement.builder().name("Enum").build().toSchema())
        .isEqualTo("enum Enum {}\n");
  }

  @Test public void simpleToSchema() {
    EnumElement element = EnumElement.builder()
        .name("Enum")
        .addConstant(constant("ONE", 1))
        .addConstant(constant("TWO", 2))
        .addConstant(constant("SIX", 6))
        .build();
    String schema = ""
        + "enum Enum {\n"
        + " ONE = 1;\n"
        + " TWO = 2;\n"
        + " SIX = 6;\n"
        + "}\n";
    assertThat(element.toSchema()).isEqualTo(schema);
  }

  @Test public void addMultipleConstants() {
    EnumElement element = EnumElement.builder()
        .name("Enum")
        .addConstants(Arrays.asList(constant("ONE", 1), constant("TWO", 2), constant("SIX", 6)))
        .build();
    assertThat(element.constants()).hasSize(3);
  }

  @Test public void simpleWithOptionsToSchema() {
    // Options render before constants, separated by a blank line.
    EnumElement element = EnumElement.builder()
        .name("Enum")
        .addOption(OptionElement.create("kit", STRING, "kat"))
        .addConstant(constant("ONE", 1))
        .addConstant(constant("TWO", 2))
        .addConstant(constant("SIX", 6))
        .build();
    String schema = ""
        + "enum Enum {\n"
        + " option kit = \"kat\";\n"
        + "\n"
        + " ONE = 1;\n"
        + " TWO = 2;\n"
        + " SIX = 6;\n"
        + "}\n";
    assertThat(element.toSchema()).isEqualTo(schema);
  }

  @Test public void addMultipleOptions() {
    OptionElement kitKat = OptionElement.create("kit", STRING, "kat");
    OptionElement fooBar = OptionElement.create("foo", STRING, "bar");
    EnumElement element = EnumElement.builder()
        .name("Enum")
        .addOptions(Arrays.asList(kitKat, fooBar))
        .addConstant(constant("ONE", 1))
        .build();
    assertThat(element.options()).hasSize(2);
  }

  @Test public void simpleWithDocumentationToSchema() {
    EnumElement element = EnumElement.builder()
        .name("Enum")
        .documentation("Hello")
        .addConstant(constant("ONE", 1))
        .addConstant(constant("TWO", 2))
        .addConstant(constant("SIX", 6))
        .build();
    String schema = ""
        + "// Hello\n"
        + "enum Enum {\n"
        + " ONE = 1;\n"
        + " TWO = 2;\n"
        + " SIX = 6;\n"
        + "}\n";
    assertThat(element.toSchema()).isEqualTo(schema);
  }

  @Test public void fieldToSchema() {
    assertThat(constant("NAME", 1).toSchema()).isEqualTo("NAME = 1;\n");
  }

  @Test public void fieldWithDocumentationToSchema() {
    EnumConstantElement value = EnumConstantElement.builder()
        .name("NAME")
        .tag(1)
        .documentation("Hello")
        .build();
    String schema = ""
        + "// Hello\n"
        + "NAME = 1;\n";
    assertThat(value.toSchema()).isEqualTo(schema);
  }

  @Test public void fieldWithOptionsToSchema() {
    EnumConstantElement value = EnumConstantElement.builder()
        .name("NAME")
        .tag(1)
        .addOption(OptionElement.create("kit", STRING, "kat", true))
        .addOption(OptionElement.create("tit", STRING, "tat"))
        .build();
    String schema = "NAME = 1 [\n"
        + " (kit) = \"kat\",\n"
        + " tit = \"tat\"\n"
        + "];\n";
    assertThat(value.toSchema()).isEqualTo(schema);
  }

  @Test public void duplicateValueTagThrows() {
    try {
      EnumElement.builder()
          .name("Enum1")
          .qualifiedName("example.Enum")
          .addConstant(constant("VALUE1", 1))
          .addConstant(constant("VALUE2", 1))
          .build();
      fail();
    } catch (IllegalStateException e) {
      assertThat(e).hasMessage("Duplicate tag 1 in example.Enum");
    }
  }

  @Test public void duplicateValueTagWithAllowAlias() {
    // allow_alias permits two constants to share a tag.
    EnumElement element = EnumElement.builder()
        .name("Enum1")
        .qualifiedName("example.Enum")
        .addOption(OptionElement.create("allow_alias", BOOLEAN, "true"))
        .addConstant(constant("VALUE1", 1))
        .addConstant(constant("VALUE2", 1))
        .build();
    assertThat(element.constants()).hasSize(2);
  }
}
| 2,852 |
0 | Create_ds/protoparser/src/test/java/com/squareup | Create_ds/protoparser/src/test/java/com/squareup/protoparser/EnumConstantElementTest.java | package com.squareup.protoparser;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.fail;
/** Tests for {@link EnumConstantElement} builder validation. */
public final class EnumConstantElementTest {
  @Test public void nameRequired() {
    // Building without a name must fail fast.
    try {
      EnumConstantElement.builder().tag(1).build();
      fail();
    } catch (NullPointerException expected) {
      assertThat(expected).hasMessage("name == null");
    }
  }

  @Test public void tagRequired() {
    // Building without a tag must fail fast.
    try {
      EnumConstantElement.builder().name("Test").build();
      fail();
    } catch (NullPointerException expected) {
      assertThat(expected).hasMessage("tag == null");
    }
  }

  @Test public void nullBuilderValuesThrow() {
    // Builder setters reject null immediately rather than deferring to build().
    try {
      EnumConstantElement.builder().name(null);
      fail();
    } catch (NullPointerException expected) {
      assertThat(expected).hasMessage("name == null");
    }
    try {
      EnumConstantElement.builder().documentation(null);
      fail();
    } catch (NullPointerException expected) {
      assertThat(expected).hasMessage("documentation == null");
    }
    try {
      EnumConstantElement.builder().addOption(null);
      fail();
    } catch (NullPointerException expected) {
      assertThat(expected).hasMessage("option == null");
    }
  }
}
| 2,853 |
0 | Create_ds/protoparser/src/test/java/com/squareup | Create_ds/protoparser/src/test/java/com/squareup/protoparser/UtilsTest.java | package com.squareup.protoparser;
import org.junit.Test;
import static com.squareup.protoparser.Utils.appendDocumentation;
import static com.squareup.protoparser.Utils.appendIndented;
import static org.assertj.core.api.Assertions.assertThat;
/** Tests for the {@link Utils} text-formatting helpers. */
public class UtilsTest {
  @Test public void indentationTest() {
    // Each input line is indented and a trailing newline is appended.
    StringBuilder output = new StringBuilder();
    appendIndented(output, "Foo\nBar\nBaz");
    assertThat(output.toString()).isEqualTo(" Foo\n Bar\n Baz\n");
  }

  @Test public void documentationTest() {
    // Each input line becomes a "//" comment line.
    StringBuilder output = new StringBuilder();
    appendDocumentation(output, "Foo\nBar\nBaz");
    assertThat(output.toString()).isEqualTo(""
        + "// Foo\n"
        + "// Bar\n"
        + "// Baz\n");
  }
}
| 2,854 |
0 | Create_ds/protoparser/src/test/java/com/squareup | Create_ds/protoparser/src/test/java/com/squareup/protoparser/ParsingTester.java | package com.squareup.protoparser;
import java.io.File;
import java.util.ArrayDeque;
import java.util.Collections;
import java.util.Deque;
/** Recursively traverse a directory and attempt to parse all of its proto files. */
public class ParsingTester {
  /** Directory under which to search for protos. Change as needed. */
  private static final File ROOT = new File("/path/to/protos");

  /**
   * Breadth-first walk from {@link #ROOT}; parses every {@code .proto} file found and
   * prints a total/failed summary. Parse failures are logged and counted, not fatal.
   */
  public static void main(String... args) {
    int total = 0;
    int failed = 0;
    Deque<File> fileQueue = new ArrayDeque<>();
    fileQueue.add(ROOT);
    while (!fileQueue.isEmpty()) {
      File file = fileQueue.removeFirst();
      if (file.isDirectory()) {
        // Bug fix: listFiles() returns null on an I/O error or when the directory is
        // unreadable; passing null to Collections.addAll would throw NPE.
        File[] children = file.listFiles();
        if (children != null) {
          Collections.addAll(fileQueue, children);
        }
      } else if (file.getName().endsWith(".proto")) {
        System.out.println("Parsing " + file.getPath());
        total += 1;
        try {
          ProtoParser.parseUtf8(file);
        } catch (Exception e) {
          // Best-effort batch run: record the failure and keep going.
          e.printStackTrace();
          failed += 1;
        }
      }
    }
    System.out.println("\nTotal: " + total + " Failed: " + failed);
  }
}
| 2,855 |
0 | Create_ds/protoparser/src/test/java/com/squareup | Create_ds/protoparser/src/test/java/com/squareup/protoparser/ProtoParserTest.java | // Copyright 2013 Square, Inc.
package com.squareup.protoparser;
import com.squareup.protoparser.DataType.MapType;
import com.squareup.protoparser.DataType.NamedType;
import com.squareup.protoparser.DataType.ScalarType;
import com.squareup.protoparser.OptionElement.Kind;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import static com.squareup.protoparser.DataType.ScalarType.ANY;
import static com.squareup.protoparser.DataType.ScalarType.BOOL;
import static com.squareup.protoparser.DataType.ScalarType.BYTES;
import static com.squareup.protoparser.DataType.ScalarType.DOUBLE;
import static com.squareup.protoparser.DataType.ScalarType.FIXED32;
import static com.squareup.protoparser.DataType.ScalarType.FIXED64;
import static com.squareup.protoparser.DataType.ScalarType.FLOAT;
import static com.squareup.protoparser.DataType.ScalarType.INT32;
import static com.squareup.protoparser.DataType.ScalarType.INT64;
import static com.squareup.protoparser.DataType.ScalarType.SFIXED32;
import static com.squareup.protoparser.DataType.ScalarType.SFIXED64;
import static com.squareup.protoparser.DataType.ScalarType.SINT32;
import static com.squareup.protoparser.DataType.ScalarType.SINT64;
import static com.squareup.protoparser.DataType.ScalarType.STRING;
import static com.squareup.protoparser.DataType.ScalarType.UINT32;
import static com.squareup.protoparser.DataType.ScalarType.UINT64;
import static com.squareup.protoparser.FieldElement.Label.ONE_OF;
import static com.squareup.protoparser.FieldElement.Label.OPTIONAL;
import static com.squareup.protoparser.FieldElement.Label.REQUIRED;
import static com.squareup.protoparser.TestUtils.list;
import static com.squareup.protoparser.TestUtils.map;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.fail;
public final class ProtoParserTest {
@Test public void typeParsing() {
String proto = ""
+ "message Types {\n"
+ " required any f1 = 1;\n"
+ " required bool f2 = 2;\n"
+ " required bytes f3 = 3;\n"
+ " required double f4 = 4;\n"
+ " required float f5 = 5;\n"
+ " required fixed32 f6 = 6;\n"
+ " required fixed64 f7 = 7;\n"
+ " required int32 f8 = 8;\n"
+ " required int64 f9 = 9;\n"
+ " required sfixed32 f10 = 10;\n"
+ " required sfixed64 f11 = 11;\n"
+ " required sint32 f12 = 12;\n"
+ " required sint64 f13 = 13;\n"
+ " required string f14 = 14;\n"
+ " required uint32 f15 = 15;\n"
+ " required uint64 f16 = 16;\n"
+ " required map<string, bool> f17 = 17;\n"
+ " required map<arbitrary, nested.nested> f18 = 18;\n"
+ " required arbitrary f19 = 19;\n"
+ " required nested.nested f20 = 20;\n"
+ "}\n";
ProtoFile expected = ProtoFile.builder("test.proto")
.addType(MessageElement.builder()
.name("Types")
.addField(FieldElement.builder().label(REQUIRED).type(ANY).name("f1").tag(1).build())
.addField(FieldElement.builder().label(REQUIRED).type(BOOL).name("f2").tag(2).build())
.addField(FieldElement.builder().label(REQUIRED).type(BYTES).name("f3").tag(3).build())
.addField(FieldElement.builder().label(REQUIRED).type(DOUBLE).name("f4").tag(4).build())
.addField(FieldElement.builder().label(REQUIRED).type(FLOAT).name("f5").tag(5).build())
.addField(
FieldElement.builder().label(REQUIRED).type(FIXED32).name("f6").tag(6).build())
.addField(
FieldElement.builder().label(REQUIRED).type(FIXED64).name("f7").tag(7).build())
.addField(FieldElement.builder().label(REQUIRED).type(INT32).name("f8").tag(8).build())
.addField(FieldElement.builder().label(REQUIRED).type(INT64).name("f9").tag(9).build())
.addField(
FieldElement.builder().label(REQUIRED).type(SFIXED32).name("f10").tag(10).build())
.addField(
FieldElement.builder().label(REQUIRED).type(SFIXED64).name("f11").tag(11).build())
.addField(
FieldElement.builder().label(REQUIRED).type(SINT32).name("f12").tag(12).build())
.addField(
FieldElement.builder().label(REQUIRED).type(SINT64).name("f13").tag(13).build())
.addField(
FieldElement.builder().label(REQUIRED).type(STRING).name("f14").tag(14).build())
.addField(
FieldElement.builder().label(REQUIRED).type(UINT32).name("f15").tag(15).build())
.addField(
FieldElement.builder().label(REQUIRED).type(UINT64).name("f16").tag(16).build())
.addField(FieldElement.builder()
.label(REQUIRED)
.type(MapType.create(STRING, BOOL))
.name("f17")
.tag(17)
.build())
.addField(FieldElement.builder()
.label(REQUIRED)
.type(MapType.create(NamedType.create("arbitrary"),
NamedType.create("nested.nested")))
.name("f18")
.tag(18)
.build())
.addField(FieldElement.builder()
.label(REQUIRED)
.type(NamedType.create("arbitrary"))
.name("f19")
.tag(19)
.build())
.addField(FieldElement.builder()
.label(REQUIRED)
.type(NamedType.create("nested.nested"))
.name("f20")
.tag(20)
.build())
.build())
.build();
assertThat(ProtoParser.parse("test.proto", proto)).isEqualTo(expected);
}
@Test public void singleLineComment() {
String proto = ""
+ "// Test all the things!\n"
+ "message Test {}";
ProtoFile parsed = ProtoParser.parse("test.proto", proto);
TypeElement type = parsed.typeElements().get(0);
assertThat(type.documentation()).isEqualTo("Test all the things!");
}
@Test public void multipleSingleLineComments() {
String proto = ""
+ "// Test all\n"
+ "// the things!\n"
+ "message Test {}";
String expected = ""
+ "Test all\n"
+ "the things!";
ProtoFile parsed = ProtoParser.parse("test.proto", proto);
TypeElement type = parsed.typeElements().get(0);
assertThat(type.documentation()).isEqualTo(expected);
}
@Test public void singleLineJavadocComment() {
String proto = ""
+ "/** Test */\n"
+ "message Test {}";
ProtoFile parsed = ProtoParser.parse("test.proto", proto);
TypeElement type = parsed.typeElements().get(0);
assertThat(type.documentation()).isEqualTo("Test");
}
@Test public void multilineJavadocComment() {
String proto = ""
+ "/**\n"
+ " * Test\n"
+ " *\n"
+ " * Foo\n"
+ " */\n"
+ "message Test {}";
String expected = ""
+ "Test\n"
+ "\n"
+ "Foo";
ProtoFile parsed = ProtoParser.parse("test.proto", proto);
TypeElement type = parsed.typeElements().get(0);
assertThat(type.documentation()).isEqualTo(expected);
}
@Test public void multipleSingleLineCommentsWithLeadingWhitespace() {
String proto = ""
+ "// Test\n"
+ "// All\n"
+ "// The\n"
+ "// Things!\n"
+ "message Test {}";
String expected = ""
+ "Test\n"
+ " All\n"
+ " The\n"
+ " Things!";
ProtoFile parsed = ProtoParser.parse("test.proto", proto);
TypeElement type = parsed.typeElements().get(0);
assertThat(type.documentation()).isEqualTo(expected);
}
@Test public void multilineJavadocCommentWithLeadingWhitespace() {
String proto = ""
+ "/**\n"
+ " * Test\n"
+ " * All\n"
+ " * The\n"
+ " * Things!\n"
+ " */\n"
+ "message Test {}";
String expected = ""
+ "Test\n"
+ " All\n"
+ " The\n"
+ " Things!";
ProtoFile parsed = ProtoParser.parse("test.proto", proto);
TypeElement type = parsed.typeElements().get(0);
assertThat(type.documentation()).isEqualTo(expected);
}
@Test public void multilineJavadocCommentWithoutLeadingAsterisks() {
// We do not honor leading whitespace when the comment lacks leading asterisks.
String proto = ""
+ "/**\n"
+ " Test\n"
+ " All\n"
+ " The\n"
+ " Things!\n"
+ " */\n"
+ "message Test {}";
String expected = ""
+ "Test\n"
+ "All\n"
+ "The\n"
+ "Things!";
ProtoFile parsed = ProtoParser.parse("test.proto", proto);
TypeElement type = parsed.typeElements().get(0);
assertThat(type.documentation()).isEqualTo(expected);
}
@Test public void messageFieldTrailingComment() {
// Trailing message field comment.
String proto = ""
+ "message Test {\n"
+ " optional string name = 1; // Test all the things!\n"
+ "}";
ProtoFile parsed = ProtoParser.parse("test.proto", proto);
MessageElement message = (MessageElement) parsed.typeElements().get(0);
FieldElement field = message.fields().get(0);
assertThat(field.documentation()).isEqualTo("Test all the things!");
}
@Test public void messageFieldLeadingAndTrailingCommentAreCombined() {
String proto = ""
+ "message Test {\n"
+ " // Test all...\n"
+ " optional string name = 1; // ...the things!\n"
+ "}";
ProtoFile parsed = ProtoParser.parse("test.proto", proto);
MessageElement message = (MessageElement) parsed.typeElements().get(0);
FieldElement field = message.fields().get(0);
assertThat(field.documentation()).isEqualTo("Test all...\n...the things!");
}
@Test public void trailingCommentNotAssignedToFollowingField() {
String proto = ""
+ "message Test {\n"
+ " optional string first_name = 1; // Testing!\n"
+ " optional string last_name = 2;\n"
+ "}";
ProtoFile parsed = ProtoParser.parse("test.proto", proto);
MessageElement message = (MessageElement) parsed.typeElements().get(0);
FieldElement field1 = message.fields().get(0);
assertThat(field1.documentation()).isEqualTo("Testing!");
FieldElement field2 = message.fields().get(1);
assertThat(field2.documentation()).isEqualTo("");
}
@Test public void enumValueTrailingComment() {
String proto = ""
+ "enum Test {\n"
+ " FOO = 1; // Test all the things! \n"
+ "}";
ProtoFile parsed = ProtoParser.parse("test.proto", proto);
EnumElement enumElement = (EnumElement) parsed.typeElements().get(0);
EnumConstantElement value = enumElement.constants().get(0);
assertThat(value.documentation()).isEqualTo("Test all the things!");
}
@Test public void trailingMultilineComment() {
String proto = ""
+ "enum Test {\n"
+ " FOO = 1; /* Test all the things! */ \n"
+ " BAR = 2;/*Test all the things!*/\n"
+ "}";
ProtoFile parsed = ProtoParser.parse("test.proto", proto);
EnumElement enumElement = (EnumElement) parsed.typeElements().get(0);
EnumConstantElement foo = enumElement.constants().get(0);
assertThat(foo.documentation()).isEqualTo("Test all the things!");
EnumConstantElement bar = enumElement.constants().get(1);
assertThat(bar.documentation()).isEqualTo("Test all the things!");
}
@Test public void trailingUnclosedMultilineCommentThrows() {
  // An unterminated trailing /* comment must be reported as a syntax error.
  String proto = ""
      + "enum Test {\n"
      + " FOO = 1; /* Test all the things! \n"
      + "}";
  try {
    ProtoParser.parse("test.proto", proto);
    fail(); // Bug fix: without fail() the test silently passed when no exception was thrown.
  } catch (IllegalStateException e) {
    assertThat(e).hasMessage(
        "Syntax error in test.proto at 2:38: trailing comment must be closed on the same line");
  }
}
@Test public void trailingMultilineCommentMustBeLastOnLineThrows() {
  // Syntax following a closed trailing comment on the same line is an error.
  String proto = ""
      + "enum Test {\n"
      + " FOO = 1; /* Test all the things! */ BAR = 2;\n"
      + "}";
  try {
    ProtoParser.parse("test.proto", proto);
    fail(); // Bug fix: without fail() the test silently passed when no exception was thrown.
  } catch (IllegalStateException e) {
    assertThat(e).hasMessage(
        "Syntax error in test.proto at 2:40: no syntax may follow trailing comment");
  }
}
@Test public void invalidTrailingComment() {
  // A lone '/' is neither '//' nor '/*' and must be rejected.
  String proto = ""
      + "enum Test {\n"
      + " FOO = 1; /\n"
      + "}";
  try {
    ProtoParser.parse("test.proto", proto);
    fail(); // Bug fix: without fail() the test silently passed when no exception was thrown.
  } catch (IllegalStateException e) {
    assertThat(e).hasMessage(
        "Syntax error in test.proto at 2:12: expected '//' or '/*'");
  }
}
@Test public void enumValueLeadingAndTrailingCommentsAreCombined() {
String proto = ""
+ "enum Test {\n"
+ " // Test all...\n"
+ " FOO = 1; // ...the things!\n"
+ "}";
ProtoFile parsed = ProtoParser.parse("test.proto", proto);
EnumElement enumElement = (EnumElement) parsed.typeElements().get(0);
EnumConstantElement value = enumElement.constants().get(0);
assertThat(value.documentation()).isEqualTo("Test all...\n...the things!");
}
@Test public void trailingCommentNotCombinedWhenEmpty() {
String proto = ""
+ "enum Test {\n"
+ " // Test all...\n"
+ " FOO = 1; // \n"
+ "}";
ProtoFile parsed = ProtoParser.parse("test.proto", proto);
EnumElement enumElement = (EnumElement) parsed.typeElements().get(0);
EnumConstantElement value = enumElement.constants().get(0);
assertThat(value.documentation()).isEqualTo("Test all...");
}
@Test public void syntaxNotRequired() throws Exception {
String proto = "message Foo {}";
ProtoFile parsed = ProtoParser.parse("test.proto", proto);
assertThat(parsed.syntax()).isNull();
}
@Test public void syntaxSpecified() throws Exception {
String proto = ""
+ "syntax = \"proto3\";\n"
+ "message Foo {}";
ProtoFile expected = ProtoFile.builder("test.proto")
.syntax(ProtoFile.Syntax.PROTO_3)
.addType(MessageElement.builder().name("Foo").build())
.build();
assertThat(ProtoParser.parse("test.proto", proto)).isEqualTo(expected);
}
@Test public void invalidSyntaxValueThrows() throws Exception {
  // Only proto2 and proto3 are legal syntax values.
  String proto = ""
      + "syntax = \"proto4\";\n"
      + "message Foo {}";
  try {
    ProtoParser.parse("test.proto", proto);
    fail(); // Bug fix: without fail() the test silently passed when no exception was thrown.
  } catch (IllegalStateException e) {
    assertThat(e).hasMessage(
        "Syntax error in test.proto at 1:18: 'syntax' must be 'proto2' or 'proto3'. Found: proto4");
  }
}
@Test public void syntaxInWrongContextThrows() {
  // A syntax declaration is only valid at file level, not inside a message.
  String proto = ""
      + "message Foo {\n"
      + " syntax = \"proto2\";\n"
      + "}";
  try {
    ProtoParser.parse("test.proto", proto);
    fail(); // Bug fix: without fail() the test silently passed when no exception was thrown.
  } catch (IllegalStateException e) {
    assertThat(e).hasMessage("Syntax error in test.proto at 2:9: 'syntax' in MESSAGE");
  }
}
@Test public void parseMessageAndFields() throws Exception {
String proto = ""
+ "message SearchRequest {\n"
+ " required string query = 1;\n"
+ " optional int32 page_number = 2;\n"
+ " optional int32 result_per_page = 3;\n"
+ "}";
ProtoFile expected = ProtoFile.builder("search.proto")
.addType(MessageElement.builder()
.name("SearchRequest")
.addField(FieldElement.builder()
.label(REQUIRED)
.type(STRING)
.name("query")
.tag(1)
.build())
.addField(FieldElement.builder()
.label(OPTIONAL)
.type(INT32)
.name("page_number")
.tag(2)
.build())
.addField(FieldElement.builder()
.label(OPTIONAL)
.type(INT32)
.name("result_per_page")
.tag(3)
.build())
.build())
.build();
assertThat(ProtoParser.parse("search.proto", proto)).isEqualTo(expected);
}
// Fields declared inside a oneof group parse with the ONE_OF label and are
// attached to the enclosing OneOfElement rather than to the message directly.
@Test public void parseMessageAndOneOf() throws Exception {
  String proto = ""
      + "message SearchRequest {\n"
      + "  required string query = 1;\n"
      + "  oneof page_info {\n"
      + "    int32 page_number = 2;\n"
      + "    int32 result_per_page = 3;\n"
      + "  }\n"
      + "}";
  ProtoFile expected = ProtoFile.builder("search.proto")
      .addType(MessageElement.builder()
          .name("SearchRequest")
          .addField(FieldElement.builder()
              .label(REQUIRED)
              .type(STRING)
              .name("query")
              .tag(1)
              .build())
          .addOneOf(OneOfElement.builder()
              .name("page_info")
              .addField(FieldElement.builder()
                  .label(ONE_OF)
                  .type(INT32)
                  .name("page_number")
                  .tag(2)
                  .build())
              .addField(FieldElement.builder()
                  .label(ONE_OF)
                  .type(INT32)
                  .name("result_per_page")
                  .tag(3)
                  .build())
              .build())
          .build())
      .build();
  assertThat(ProtoParser.parse("search.proto", proto)).isEqualTo(expected);
}
// Enum parsing captures documentation from both /** block */ and // line comments:
// the type-level doc is attached to the EnumElement, per-constant docs to each constant.
@Test public void parseEnum() throws Exception {
  String proto = ""
      + "/**\n"
      + " * What's on my waffles.\n"
      + " * Also works on pancakes.\n"
      + " */\n"
      + "enum Topping {\n"
      + "  FRUIT = 1;\n"
      + "  /** Yummy, yummy cream. */\n"
      + "  CREAM = 2;\n"
      + "\n"
      + "  // Quebec Maple syrup\n"
      + "  SYRUP = 3;\n"
      + "}\n";
  ProtoFile expected = ProtoFile.builder("waffles.proto")
      .addType(EnumElement.builder()
          .name("Topping")
          .documentation("What's on my waffles.\nAlso works on pancakes.")
          .addConstant(EnumConstantElement.builder().name("FRUIT").tag(1).build())
          .addConstant(EnumConstantElement.builder()
              .name("CREAM")
              .tag(2)
              .documentation("Yummy, yummy cream.")
              .build())
          .addConstant(EnumConstantElement.builder()
              .name("SYRUP")
              .tag(3)
              .documentation("Quebec Maple syrup")
              .build())
          .build())
      .build();
  assertThat(ProtoParser.parse("waffles.proto", proto)).isEqualTo(expected);
}
// Enums may carry both type-level options ("option (max_choices) = 2;") and
// per-constant bracketed options ("FRUIT = 1 [(healthy) = true]"); both parse
// as OptionElements with isParenthesized == true.
@Test public void parseEnumWithOptions() throws Exception {
  String proto = ""
      + "/**\n"
      + " * What's on my waffles.\n"
      + " * Also works on pancakes.\n"
      + " */\n"
      + "enum Topping {\n"
      + "  option (max_choices) = 2;\n"
      + "\n"
      + "  FRUIT = 1 [(healthy) = true];\n"
      + "  /** Yummy, yummy cream. */\n"
      + "  CREAM = 2;\n"
      + "\n"
      + "  // Quebec Maple syrup\n"
      + "  SYRUP = 3;\n"
      + "}\n";
  ProtoFile expected = ProtoFile.builder("waffles.proto")
      .addType(EnumElement.builder()
          .name("Topping")
          .qualifiedName("Topping")
          .documentation("What's on my waffles.\nAlso works on pancakes.")
          .addOption(OptionElement.create("max_choices", Kind.NUMBER, "2", true))
          .addConstant(EnumConstantElement.builder()
              .name("FRUIT")
              .tag(1)
              .addOption(OptionElement.create("healthy", Kind.BOOLEAN, "true", true))
              .build())
          .addConstant(EnumConstantElement.builder()
              .name("CREAM")
              .tag(2)
              .documentation("Yummy, yummy cream.")
              .build())
          .addConstant(EnumConstantElement.builder()
              .name("SYRUP")
              .tag(3)
              .documentation("Quebec Maple syrup")
              .build())
          .build())
      .build();
  assertThat(ProtoParser.parse("waffles.proto", proto)).isEqualTo(expected);
}
// A package declaration sets the file's packageName and qualifies nested type
// names (FileDescriptorSet -> google.protobuf.FileDescriptorSet); the adjacent
// line comments become the message's documentation.
@Test public void packageDeclaration() throws Exception {
  String proto = ""
      + "package google.protobuf;\n"
      + "option java_package = \"com.google.protobuf\";\n"
      + "\n"
      + "// The protocol compiler can output a FileDescriptorSet containing the .proto\n"
      + "// files it parses.\n"
      + "message FileDescriptorSet {\n"
      + "}\n";
  ProtoFile expected = ProtoFile.builder("descriptor.proto")
      .packageName("google.protobuf")
      .addType(MessageElement.builder()
          .name("FileDescriptorSet")
          .qualifiedName("google.protobuf.FileDescriptorSet")
          .documentation(
              "The protocol compiler can output a FileDescriptorSet containing the .proto\nfiles it parses.")
          .build())
      .addOption(OptionElement.create("java_package", Kind.STRING, "com.google.protobuf"))
      .build();
  assertThat(ProtoParser.parse("descriptor.proto", proto)).isEqualTo(expected);
}
// Exercises several constructs nested in one message: a field with multiple
// bracketed options, a nested enum whose constant has options, and both a
// single-tag extensions range ("extensions 500;" -> create(500, 500)) and an
// open-ended one ("1000 to max" -> MAX_TAG_VALUE).
@Test public void nestingInMessage() throws Exception {
  String proto = ""
      + "message FieldOptions {\n"
      + "  optional CType ctype = 1 [default = STRING, deprecated=true];\n"
      + "  enum CType {\n"
      + "    STRING = 0[(opt_a) = 1, (opt_b) = 2];\n"
      + "  };\n"
      + "  // Clients can define custom options in extensions of this message. See above.\n"
      + "  extensions 500;\n"
      + "  extensions 1000 to max;\n"
      + "}\n";
  TypeElement enumElement = EnumElement.builder()
      .name("CType")
      .qualifiedName("FieldOptions.CType")
      .addConstant(EnumConstantElement.builder()
          .name("STRING")
          .tag(0)
          .addOption(OptionElement.create("opt_a", Kind.NUMBER, "1", true))
          .addOption(OptionElement.create("opt_b", Kind.NUMBER, "2", true))
          .build())
      .build();
  FieldElement field = FieldElement.builder()
      .label(OPTIONAL)
      .type(NamedType.create("CType"))
      .name("ctype")
      .tag(1)
      .addOption(OptionElement.create("default", Kind.ENUM, "STRING"))
      .addOption(OptionElement.create("deprecated", Kind.BOOLEAN, "true"))
      .build();
  assertThat(field.options()).containsOnly( //
      OptionElement.create("default", Kind.ENUM, "STRING"), //
      OptionElement.create("deprecated", Kind.BOOLEAN, "true"));
  TypeElement messageElement = MessageElement.builder()
      .name("FieldOptions")
      .addField(field)
      .addType(enumElement)
      .addExtensions(ExtensionsElement.create(500, 500,
          "Clients can define custom options in extensions of this message. See above."))
      .addExtensions(ExtensionsElement.create(1000, ProtoFile.MAX_TAG_VALUE))
      .build();
  ProtoFile expected = ProtoFile.builder("descriptor.proto").addType(messageElement).build();
  ProtoFile actual = ProtoParser.parse("descriptor.proto", proto);
  assertThat(actual).isEqualTo(expected);
}
// Parenthesized ("(delay)") and plain ("default") option names may be mixed in
// any order within one bracket list; parenthesization is reflected by the
// trailing boolean argument of OptionElement.create.
@Test public void optionParentheses() throws Exception {
  String proto = ""
      + "message Chickens {\n"
      + "  optional bool koka_ko_koka_ko = 1 [default = true];\n"
      + "  optional bool coodle_doodle_do = 2 [(delay) = 100, default = false];\n"
      + "  optional bool coo_coo_ca_cha = 3 [default = true, (delay) = 200];\n"
      + "  optional bool cha_chee_cha = 4;\n"
      + "}\n";
  ProtoFile expected = ProtoFile.builder("chickens.proto")
      .addType(MessageElement.builder()
          .name("Chickens")
          .addField(FieldElement.builder()
              .label(OPTIONAL)
              .type(BOOL)
              .name("koka_ko_koka_ko")
              .tag(1)
              .addOption(OptionElement.create("default", Kind.BOOLEAN, "true"))
              .build())
          .addField(FieldElement.builder()
              .label(OPTIONAL)
              .type(BOOL)
              .name("coodle_doodle_do")
              .tag(2)
              .addOption(OptionElement.create("delay", Kind.NUMBER, "100", true))
              .addOption(OptionElement.create("default", Kind.BOOLEAN, "false"))
              .build())
          .addField(FieldElement.builder()
              .label(OPTIONAL)
              .type(BOOL)
              .name("coo_coo_ca_cha")
              .tag(3)
              .addOption(OptionElement.create("default", Kind.BOOLEAN, "true"))
              .addOption(OptionElement.create("delay", Kind.NUMBER, "200", true))
              .build())
          .addField(FieldElement.builder()
              .label(OPTIONAL)
              .type(BOOL)
              .name("cha_chee_cha")
              .tag(4)
              .build())
          .build())
      .build();
  assertThat(ProtoParser.parse("chickens.proto", proto)).isEqualTo(expected);
}
// A plain import statement is recorded as a (non-public) dependency.
@Test public void imports() throws Exception {
  String schema = "import \"src/test/resources/unittest_import.proto\";\n";
  ProtoFile expected = ProtoFile.builder("descriptor.proto")
      .addDependency("src/test/resources/unittest_import.proto")
      .build();
  ProtoFile parsed = ProtoParser.parse("descriptor.proto", schema);
  assertThat(parsed).isEqualTo(expected);
}
// An "import public" statement is recorded as a public dependency, kept
// separate from regular dependencies.
@Test public void publicImports() throws Exception {
  String schema = "import public \"src/test/resources/unittest_import.proto\";\n";
  ProtoFile expected = ProtoFile.builder("descriptor.proto")
      .addPublicDependency("src/test/resources/unittest_import.proto")
      .build();
  ProtoFile parsed = ProtoParser.parse("descriptor.proto", schema);
  assertThat(parsed).isEqualTo(expected);
}
// A top-level extend block parses into an ExtendElement, carrying the
// preceding line comment as its documentation.
@Test public void extend() throws Exception {
  String schema = ""
      + "// Extends Foo\n"
      + "extend Foo {\n"
      + "  optional int32 bar = 126;\n"
      + "}";
  FieldElement bar =
      FieldElement.builder().label(OPTIONAL).type(INT32).name("bar").tag(126).build();
  ProtoFile expected = ProtoFile.builder("descriptor.proto")
      .addExtendDeclaration(ExtendElement.builder()
          .name("Foo")
          .documentation("Extends Foo")
          .addField(bar)
          .build())
      .build();
  assertThat(ProtoParser.parse("descriptor.proto", schema)).isEqualTo(expected);
}
// An extend block nested inside a message is hoisted to the file level: the
// expected ProtoFile has the ExtendElement as a sibling of the message, not
// nested within it.
@Test public void extendInMessage() throws Exception {
  String proto = ""
      + "message Bar {\n"
      + "  extend Foo {\n"
      + "    optional Bar bar = 126;\n"
      + "  }\n"
      + "}";
  ProtoFile expected = ProtoFile.builder("descriptor.proto")
      .addType(MessageElement.builder().name("Bar").build())
      .addExtendDeclaration(ExtendElement.builder()
          .name("Foo")
          .addField(FieldElement.builder()
              .label(OPTIONAL)
              .type(NamedType.create("Bar"))
              .name("bar")
              .tag(126)
              .build())
          .build())
      .build();
  assertThat(ProtoParser.parse("descriptor.proto", proto)).isEqualTo(expected);
}
// Like extendInMessage, but with a package: both the message and the extend
// target are qualified with the package name (kit.kat.Bar, kit.kat.Foo).
@Test public void extendInMessageWithPackage() throws Exception {
  String proto = ""
      + "package kit.kat;\n"
      + ""
      + "message Bar {\n"
      + "  extend Foo {\n"
      + "    optional Bar bar = 126;\n"
      + "  }\n"
      + "}";
  ProtoFile expected = ProtoFile.builder("descriptor.proto")
      .packageName("kit.kat")
      .addType(MessageElement.builder().name("Bar").qualifiedName("kit.kat.Bar").build())
      .addExtendDeclaration(ExtendElement.builder()
          .name("Foo")
          .qualifiedName("kit.kat.Foo")
          .addField(FieldElement.builder()
              .label(OPTIONAL)
              .type(NamedType.create("Bar"))
              .name("bar")
              .tag(126)
              .build())
          .build())
      .build();
  assertThat(ProtoParser.parse("descriptor.proto", proto)).isEqualTo(expected);
}
// An extend target given as a dotted name ("example.Foo") is kept verbatim as
// the ExtendElement's name when no package is declared.
@Test public void fqcnExtendInMessage() throws Exception {
  String proto = ""
      + "message Bar {\n"
      + "  extend example.Foo {\n"
      + "    optional Bar bar = 126;\n"
      + "  }\n"
      + "}";
  ProtoFile expected = ProtoFile.builder("descriptor.proto")
      .addType(MessageElement.builder().name("Bar").build())
      .addExtendDeclaration(ExtendElement.builder()
          .name("example.Foo")
          .addField(FieldElement.builder()
              .label(OPTIONAL)
              .type(NamedType.create("Bar"))
              .name("bar")
              .tag(126)
              .build())
          .build())
      .build();
  assertThat(ProtoParser.parse("descriptor.proto", proto)).isEqualTo(expected);
}
// With a package declared, an already-dotted extend target ("example.Foo")
// keeps its own name un-prefixed (note: no qualifiedName set here, unlike the
// unqualified-name case in extendInMessageWithPackage).
@Test public void fqcnExtendInMessageWithPackage() throws Exception {
  String proto = ""
      + "package kit.kat;\n"
      + ""
      + "message Bar {\n"
      + "  extend example.Foo {\n"
      + "    optional Bar bar = 126;\n"
      + "  }\n"
      + "}";
  ProtoFile expected = ProtoFile.builder("descriptor.proto")
      .packageName("kit.kat")
      .addType(MessageElement.builder().name("Bar").qualifiedName("kit.kat.Bar").build())
      .addExtendDeclaration(ExtendElement.builder()
          .name("example.Foo")
          .addField(FieldElement.builder()
              .label(OPTIONAL)
              .type(NamedType.create("Bar"))
              .name("bar")
              .tag(126)
              .build())
          .build())
      .build();
  assertThat(ProtoParser.parse("descriptor.proto", proto)).isEqualTo(expected);
}
// A dotted, parenthesized option name ("(squareup.redacted)") parses as a
// single parenthesized OptionElement on the field.
@Test public void defaultFieldWithParen() throws Exception {
  String schema = ""
      + "message Foo {\n"
      + "  optional string claim_token = 2 [(squareup.redacted) = true];\n"
      + "}";
  OptionElement redacted = OptionElement.create("squareup.redacted", Kind.BOOLEAN, "true", true);
  FieldElement claimToken = FieldElement.builder()
      .label(OPTIONAL)
      .type(STRING)
      .name("claim_token")
      .tag(2)
      .addOption(redacted)
      .build();
  assertThat(claimToken.options()).containsOnly(redacted);
  ProtoFile expected = ProtoFile.builder("descriptor.proto")
      .addType(MessageElement.builder().name("Foo").addField(claimToken).build())
      .build();
  assertThat(ProtoParser.parse("descriptor.proto", schema)).isEqualTo(expected);
}
// Parse \a, \b, \f, \n, \r, \t, \v, \[0-7]{1-3}, and \[xX]{0-9a-fA-F]{1,2}
// Verifies decoding of every supported escape form in a default string value;
// the expected value spells out each decoded character as a \\uXXXX escape.
// Note the raw "\1f" decodes as octal \1 followed by a literal 'f'.
@Test public void defaultFieldWithStringEscapes() throws Exception {
  String proto = ""
      + "message Foo {\n"
      + "  optional string name = 1 "
      + "[default = \"\\a\\b\\f\\n\\r\\t\\v\1f\01\001\11\011\111\\xe\\Xe\\xE\\xE\\x41\\X41\"];\n"
      + "}";
  FieldElement
      field = FieldElement.builder()
      .label(OPTIONAL)
      .type(STRING)
      .name("name")
      .tag(1)
      .addOption(OptionElement.create("default", Kind.STRING,
          "\u0007\b\f\n\r\t\u000b\u0001f\u0001\u0001\u0009\u0009I\u000e\u000e\u000e\u000eAA"))
      .build();
  assertThat(field.options()).containsOnly(OptionElement.create("default", Kind.STRING,
      "\u0007\b\f\n\r\t\u000b\u0001f\u0001\u0001\u0009\u0009I\u000e\u000e\u000e\u000eAA"));
  TypeElement messageElement = MessageElement.builder().name("Foo").addField(field).build();
  ProtoFile expected = ProtoFile.builder("foo.proto").addType(messageElement).build();
  assertThat(ProtoParser.parse("foo.proto", proto))
      .isEqualTo(expected);
}
// A hex escape with no hex digit after \x must abort parsing with a clear error.
@Test public void invalidHexStringEscape() throws Exception {
  String proto = ""
      + "message Foo {\n"
      + "  optional string name = 1 "
      + "[default = \"\\xW\"];\n"
      + "}";
  try {
    ProtoParser.parse("foo.proto", proto);
    fail();
  } catch (IllegalStateException e) {
    // BUG FIX: the original wrapped a boolean in assertThat(...) and never
    // asserted on it, so a wrong message could not fail the test. Assert on
    // the message string itself instead.
    assertThat(e.getMessage()).contains("expected a digit after \\x or \\X");
  }
}
// A service parses with service-level options, a bare rpc, and an rpc with a
// body containing both a numeric option and a map option holding a list value
// (built with the test helpers map(...)/list(...)).
@Test public void service() throws Exception {
  String proto = ""
      + "service SearchService {\n"
      + "  option (default_timeout) = 30;\n"
      + "\n"
      + "  rpc Search (SearchRequest) returns (SearchResponse);"
      + "  rpc Purchase (PurchaseRequest) returns (PurchaseResponse) {\n"
      + "    option (squareup.sake.timeout) = 15; \n"
      + "    option (squareup.a.b) = {"
      + "      value: ["
      + "        FOO,"
      + "        BAR"
      + "      ]"
      + "    };\n"
      + "  }\n"
      + "}";
  ProtoFile expected = ProtoFile.builder("descriptor.proto")
      .addService(ServiceElement.builder()
          .name("SearchService")
          .addOption(OptionElement.create("default_timeout", Kind.NUMBER, "30", true))
          .addRpc(RpcElement.builder()
              .name("Search")
              .requestType(NamedType.create("SearchRequest"))
              .responseType(NamedType.create("SearchResponse"))
              .build())
          .addRpc(RpcElement.builder()
              .name("Purchase")
              .requestType(NamedType.create("PurchaseRequest"))
              .responseType(NamedType.create("PurchaseResponse"))
              .addOption(OptionElement.create("squareup.sake.timeout", Kind.NUMBER, "15", true))
              .addOption(OptionElement.create("squareup.a.b", Kind.MAP, map("value", //
                  list("FOO", "BAR")), true))
              .build())
          .build())
      .build();
  assertThat(ProtoParser.parse("descriptor.proto", proto)).isEqualTo(expected);
}
// rpc request and response types must be named message types; a scalar such
// as "string" is rejected in either position with a position-specific error.
@Test public void serviceTypesMustBeNamed() {
  try {
    String proto = ""
        + "service SearchService {\n"
        + "  rpc Search (string) returns (SearchResponse);"
        + "}";
    ProtoParser.parse("test.proto", proto);
    fail();
  } catch (IllegalStateException e) {
    assertThat(e).hasMessage("Syntax error in test.proto at 2:21: expected message but was string");
  }
  try {
    String proto = ""
        + "service SearchService {\n"
        + "  rpc Search (SearchRequest) returns (string);"
        + "}";
    ProtoParser.parse("test.proto", proto);
    fail();
  } catch (IllegalStateException e) {
    assertThat(e).hasMessage("Syntax error in test.proto at 2:45: expected message but was string");
  }
}
// Field tags may be written in hex with either 0x or 0X prefix; both decode
// to their decimal values (0x10 -> 16, 0X11 -> 17).
@Test public void hexTag() throws Exception {
  String proto = ""
      + "message HexTag {\n"
      + "  required string hex = 0x10;\n"
      + "  required string uppercase_x_hex = 0X11;\n"
      + "}";
  ProtoFile expected = ProtoFile.builder("hex.proto")
      .addType(MessageElement.builder()
          .name("HexTag")
          .addField(FieldElement.builder()
              .label(REQUIRED)
              .type(STRING)
              .name("hex")
              .tag(16)
              .build())
          .addField(FieldElement.builder()
              .label(REQUIRED)
              .type(STRING)
              .name("uppercase_x_hex")
              .tag(17)
              .build())
          .build())
      .build();
  assertThat(ProtoParser.parse("hex.proto", proto)).isEqualTo(expected);
}
// Aggregate (message-literal) option values parse into nested LinkedHashMaps:
// plain key/value pairs, extension keys in brackets, list values, repeated
// keys collapsing into a list (squareup.three, where the optional comma is
// omitted), and maps nested two levels deep (squareup.four).
@Test public void structuredOption() throws Exception {
  String proto = ""
      + "message ExoticOptions {\n"
      + "  option (squareup.one) = {name: \"Name\", class_name:\"ClassName\"};\n"
      + "  option (squareup.two.a) = {[squareup.options.type]: EXOTIC};\n"
      + "  option (squareup.two.b) = {names: [\"Foo\", \"Bar\"]};\n"
      + "  option (squareup.three) = {x: {y: 1 y: 2}};\n" // NOTE: Omitted optional comma
      + "  option (squareup.four) = {x: {y: {z: 1}, y: {z: 2}}};\n"
      + "}";
  MessageElement.Builder expectedBuilder = MessageElement.builder().name("ExoticOptions");
  Map<String, String> option_one_map = new LinkedHashMap<>();
  option_one_map.put("name", "Name");
  option_one_map.put("class_name", "ClassName");
  expectedBuilder.addOption(OptionElement.create("squareup.one", Kind.MAP, option_one_map, true));
  Map<String, Object> option_two_a_map = new LinkedHashMap<>();
  option_two_a_map.put("[squareup.options.type]", "EXOTIC");
  expectedBuilder.addOption(OptionElement.create("squareup.two.a", Kind.MAP, option_two_a_map, true));
  Map<String, List<String>> option_two_b_map = new LinkedHashMap<>();
  option_two_b_map.put("names", Arrays.asList("Foo", "Bar"));
  expectedBuilder.addOption(OptionElement.create("squareup.two.b", Kind.MAP, option_two_b_map, true));
  // Repeated key "y" inside {x: {...}} accumulates into a list of its values.
  Map<String, Map<String, ?>> option_three_map = new LinkedHashMap<>();
  Map<String, Object> option_three_nested_map = new LinkedHashMap<>();
  option_three_nested_map.put("y", Arrays.asList("1", "2"));
  option_three_map.put("x", option_three_nested_map);
  expectedBuilder.addOption(OptionElement.create("squareup.three", Kind.MAP, option_three_map, true));
  Map<String, Map<String, ?>> option_four_map = new LinkedHashMap<>();
  Map<String, Object> option_four_map_1 = new LinkedHashMap<>();
  Map<String, Object> option_four_map_2_a = new LinkedHashMap<>();
  option_four_map_2_a.put("z", "1");
  Map<String, Object> option_four_map_2_b = new LinkedHashMap<>();
  option_four_map_2_b.put("z", "2");
  option_four_map_1.put("y", Arrays.asList(option_four_map_2_a, option_four_map_2_b));
  option_four_map.put("x", option_four_map_1);
  expectedBuilder.addOption(OptionElement.create("squareup.four", Kind.MAP, option_four_map, true));
  ProtoFile expected = ProtoFile.builder("exotic.proto").addType(expectedBuilder.build()).build();
  assertThat(ProtoParser.parse("exotic.proto", proto)).isEqualTo(expected);
}
// Nested map option values tolerate trailing commas, and multiple bracketed
// options on one field may be separated without an intervening comma here.
@Test public void optionsWithNestedMapsAndTrailingCommas() throws Exception {
  String proto = ""
      + "message StructuredOption {\n"
      + "    optional field.type has_options = 3 [\n"
      + "            (option_map) = {\n"
      + "                nested_map: {key:\"value\" key2:[\"value2a\",\"value2b\"]},\n"
      + "             }\n"
      + "            (option_string) = [\"string1\",\"string2\"]\n"
      + "    ];\n"
      + "}";
  FieldElement field = FieldElement.builder()
      .label(OPTIONAL)
      .type(NamedType.create("field.type"))
      .name("has_options")
      .tag(3)
      .addOption(OptionElement.create("option_map", Kind.MAP,
          map("nested_map", map("key", "value", "key2", list("value2a", "value2b"))), true))
      .addOption(OptionElement.create("option_string", Kind.LIST, list("string1", "string2"), true))
      .build();
  assertThat(field.options()).containsOnly( //
      OptionElement.create("option_map", Kind.MAP,
          map("nested_map", map("key", "value", "key2", list("value2a", "value2b"))), true),
      OptionElement.create("option_string", Kind.LIST, list("string1", "string2"), true));
  TypeElement expected =
      MessageElement.builder().name("StructuredOption").addField(field).build();
  ProtoFile protoFile = ProtoFile.builder("nestedmaps.proto").addType(expected).build();
  assertThat(ProtoParser.parse("nestedmaps.proto", proto))
      .isEqualTo(protoFile);
}
// One field per scalar type, each with a default at (or beyond) the type's
// numerical bound, verifying the parser preserves the literal text of every
// default value (including 64-bit-unsigned extremes) rather than narrowing it.
@Test public void optionNumericalBounds() {
  String proto = ""
      + "message Test {"
      + "  optional int32 default_int32 = 401 [default = 2147483647 ];\n"
      + "  optional uint32 default_uint32 = 402 [default = 4294967295 ];\n"
      + "  optional sint32 default_sint32 = 403 [default = -2147483648 ];\n"
      + "  optional fixed32 default_fixed32 = 404 [default = 4294967295 ];\n"
      + "  optional sfixed32 default_sfixed32 = 405 [default = -2147483648 ];\n"
      + "  optional int64 default_int64 = 406 [default = 9223372036854775807 ];\n"
      + "  optional uint64 default_uint64 = 407 [default = 18446744073709551615 ];\n"
      + "  optional sint64 default_sint64 = 408 [default = -9223372036854775808 ];\n"
      + "  optional fixed64 default_fixed64 = 409 [default = 18446744073709551615 ];\n"
      + "  optional sfixed64 default_sfixed64 = 410 [default = -9223372036854775808 ];\n"
      + "  optional bool default_bool = 411 [default = true ];\n"
      + "  optional float default_float = 412 [default = 123.456e7 ];\n"
      + "  optional double default_double = 413 [default = 123.456e78 ];\n"
      + "  optional string default_string = 414 [default = \"çok\\a\\b\\f\\n\\r\\t\\v\\1\\01\\001\\17\\017\\176\\x1\\x01\\x11\\X1\\X01\\X11güzel\" ];\n"
      + "  optional bytes default_bytes = 415 [default = \"çok\\a\\b\\f\\n\\r\\t\\v\\1\\01\\001\\17\\017\\176\\x1\\x01\\x11\\X1\\X01\\X11güzel\" ];\n"
      + "  optional NestedEnum default_nested_enum = 416 [default = A ];"
      + "}";
  ProtoFile expected = ProtoFile.builder("test.proto")
      .addType(MessageElement.builder()
          .name("Test")
          .addField(FieldElement.builder()
              .label(OPTIONAL)
              .type(ScalarType.INT32)
              .name("default_int32")
              .tag(401)
              .addOption(OptionElement.create("default", Kind.NUMBER, "2147483647"))
              .build())
          .addField(FieldElement.builder()
              .label(OPTIONAL)
              .type(ScalarType.UINT32)
              .name("default_uint32")
              .tag(402)
              .addOption(OptionElement.create("default", Kind.NUMBER, "4294967295"))
              .build())
          .addField(FieldElement.builder()
              .label(OPTIONAL)
              .type(ScalarType.SINT32)
              .name("default_sint32")
              .tag(403)
              .addOption(OptionElement.create("default", Kind.NUMBER, "-2147483648"))
              .build())
          .addField(FieldElement.builder()
              .label(OPTIONAL)
              .type(ScalarType.FIXED32)
              .name("default_fixed32")
              .tag(404)
              .addOption(OptionElement.create("default", Kind.NUMBER, "4294967295"))
              .build())
          .addField(FieldElement.builder()
              .label(OPTIONAL)
              .type(ScalarType.SFIXED32)
              .name("default_sfixed32")
              .tag(405)
              .addOption(OptionElement.create("default", Kind.NUMBER, "-2147483648"))
              .build())
          .addField(FieldElement.builder()
              .label(OPTIONAL)
              .type(ScalarType.INT64)
              .name("default_int64")
              .tag(406)
              .addOption(OptionElement.create("default", Kind.NUMBER, "9223372036854775807"))
              .build())
          .addField(FieldElement.builder()
              .label(OPTIONAL)
              .type(ScalarType.UINT64)
              .name("default_uint64")
              .tag(407)
              .addOption(OptionElement.create("default", Kind.NUMBER, "18446744073709551615"))
              .build())
          .addField(FieldElement.builder()
              .label(OPTIONAL)
              .type(ScalarType.SINT64)
              .name("default_sint64")
              .tag(408)
              .addOption(OptionElement.create("default", Kind.NUMBER, "-9223372036854775808"))
              .build())
          .addField(FieldElement.builder()
              .label(OPTIONAL)
              .type(ScalarType.FIXED64)
              .name("default_fixed64")
              .tag(409)
              .addOption(OptionElement.create("default", Kind.NUMBER, "18446744073709551615"))
              .build())
          .addField(FieldElement.builder()
              .label(OPTIONAL)
              .type(ScalarType.SFIXED64)
              .name("default_sfixed64")
              .tag(410)
              .addOption(OptionElement.create("default", Kind.NUMBER, "-9223372036854775808"))
              .build())
          .addField(FieldElement.builder()
              .label(OPTIONAL)
              .type(ScalarType.BOOL)
              .name("default_bool")
              .tag(411)
              .addOption(OptionElement.create("default", Kind.BOOLEAN, "true"))
              .build())
          .addField(FieldElement.builder()
              .label(OPTIONAL)
              .type(ScalarType.FLOAT)
              .name("default_float")
              .tag(412)
              .addOption(OptionElement.create("default", Kind.NUMBER, "123.456e7"))
              .build())
          .addField(FieldElement.builder()
              .label(OPTIONAL)
              .type(ScalarType.DOUBLE)
              .name("default_double")
              .tag(413)
              .addOption(OptionElement.create("default", Kind.NUMBER, "123.456e78"))
              .build())
          // string/bytes defaults exercise every escape form plus non-ASCII text;
          // the expected value is the decoded form written as \\uXXXX escapes.
          .addField(FieldElement.builder()
              .label(OPTIONAL)
              .type(ScalarType.STRING)
              .name("default_string")
              .tag(414)
              .addOption(OptionElement.create("default", Kind.STRING,
                  "çok\u0007\b\f\n\r\t\u000b\u0001\u0001\u0001\u000f\u000f~\u0001\u0001\u0011\u0001\u0001\u0011güzel"))
              .build())
          .addField(FieldElement.builder()
              .label(OPTIONAL)
              .type(ScalarType.BYTES)
              .name("default_bytes")
              .tag(415)
              .addOption(OptionElement.create("default", Kind.STRING,
                  "çok\u0007\b\f\n\r\t\u000b\u0001\u0001\u0001\u000f\u000f~\u0001\u0001\u0011\u0001\u0001\u0011güzel"))
              .build())
          .addField(FieldElement.builder()
              .label(OPTIONAL)
              .type(NamedType.create("NestedEnum"))
              .name("default_nested_enum")
              .tag(416)
              .addOption(OptionElement.create("default", Kind.ENUM, "A"))
              .build())
          .build())
      .build();
  assertThat(ProtoParser.parse("test.proto", proto)).isEqualTo(expected);
}
// A dotted option with a sub-field ("(validation.range).min = 1") parses as a
// Kind.OPTION whose value is itself an OptionElement for the sub-field; each
// sub-field assignment yields a separate top-level option entry.
@Test public void extensionWithNestedMessage() throws Exception {
  String proto = ""
      + "message Foo {\n"
      + "  optional int32 bar = 1 [\n"
      + "      (validation.range).min = 1,\n"
      + "      (validation.range).max = 100,\n"
      + "      default = 20\n"
      + "  ];\n"
      + "}";
  FieldElement field = FieldElement.builder()
      .label(OPTIONAL)
      .type(INT32)
      .name("bar")
      .tag(1)
      .addOption(OptionElement.create("validation.range", Kind.OPTION,
          OptionElement.create("min", Kind.NUMBER, "1"), true))
      .addOption(OptionElement.create("validation.range", Kind.OPTION,
          OptionElement.create("max", Kind.NUMBER, "100"), true))
      .addOption(OptionElement.create("default", Kind.NUMBER, "20"))
      .build();
  assertThat(field.options()).containsOnly( //
      OptionElement.create("validation.range", Kind.OPTION,
          OptionElement.create("min", Kind.NUMBER, "1"), true), //
      OptionElement.create("validation.range", Kind.OPTION,
          OptionElement.create("max", Kind.NUMBER, "100"), true), //
      OptionElement.create("default", Kind.NUMBER, "20"));
  TypeElement expected = MessageElement.builder().name("Foo").addField(field).build();
  ProtoFile protoFile = ProtoFile.builder("foo.proto").addType(expected).build();
  assertThat(ProtoParser.parse("foo.proto", proto)).isEqualTo(protoFile);
}
// The parser must not require whitespace between tokens; a one-line schema
// with a dotted named type still parses correctly.
@Test public void noWhitespace() {
  FieldElement ab = FieldElement.builder()
      .label(OPTIONAL)
      .type(NamedType.create("A.B"))
      .name("ab")
      .tag(1)
      .build();
  ProtoFile expected = ProtoFile.builder("test.proto")
      .addType(MessageElement.builder().name("C").addField(ab).build())
      .build();
  ProtoFile parsed = ProtoParser.parse("test.proto", "message C {optional A.B ab = 1;}");
  assertThat(parsed).isEqualTo(expected);
}
}
| 2,856 |
0 | Create_ds/protoparser/src/test/java/com/squareup | Create_ds/protoparser/src/test/java/com/squareup/protoparser/MessageElementTest.java | package com.squareup.protoparser;
import com.squareup.protoparser.OptionElement.Kind;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import org.junit.Test;
import static com.squareup.protoparser.DataType.ScalarType.BOOL;
import static com.squareup.protoparser.DataType.ScalarType.STRING;
import static com.squareup.protoparser.FieldElement.Label.ONE_OF;
import static com.squareup.protoparser.FieldElement.Label.REQUIRED;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.fail;
public class MessageElementTest {
// Building without a name fails fast with a descriptive NullPointerException,
// even when a qualified name was supplied.
@Test public void nameRequired() {
  MessageElement.Builder builder = MessageElement.builder().qualifiedName("Test");
  try {
    builder.build();
    fail();
  } catch (NullPointerException expected) {
    assertThat(expected).hasMessage("name == null");
  }
}
// Setting only the simple name also populates the qualified name by default.
@Test public void nameSetsQualifiedName() {
  MessageElement element = MessageElement.builder().name("Test").build();
  assertThat(element.name()).isEqualTo("Test");
  assertThat(element.qualifiedName()).isEqualTo("Test");
}
// Every builder mutator rejects null (single value, null collection, and a
// collection containing null) with a message naming the offending parameter.
// Kept as explicit try/catch blocks so each failure pinpoints its own call.
@Test public void nullBuilderValuesThrow() {
  try {
    MessageElement.builder().name(null);
    fail();
  } catch (NullPointerException e) {
    assertThat(e).hasMessage("name == null");
  }
  try {
    MessageElement.builder().qualifiedName(null);
    fail();
  } catch (NullPointerException e) {
    assertThat(e).hasMessage("qualifiedName == null");
  }
  try {
    MessageElement.builder().documentation(null);
    fail();
  } catch (NullPointerException e) {
    assertThat(e).hasMessage("documentation == null");
  }
  try {
    MessageElement.builder().addField(null);
    fail();
  } catch (NullPointerException e) {
    assertThat(e).hasMessage("field == null");
  }
  try {
    MessageElement.builder().addFields(null);
    fail();
  } catch (NullPointerException e) {
    assertThat(e).hasMessage("fields == null");
  }
  try {
    MessageElement.builder().addFields(Collections.<FieldElement>singleton(null));
    fail();
  } catch (NullPointerException e) {
    assertThat(e).hasMessage("field == null");
  }
  try {
    MessageElement.builder().addType(null);
    fail();
  } catch (NullPointerException e) {
    assertThat(e).hasMessage("type == null");
  }
  try {
    MessageElement.builder().addTypes(null);
    fail();
  } catch (NullPointerException e) {
    assertThat(e).hasMessage("types == null");
  }
  try {
    MessageElement.builder().addTypes(Collections.<TypeElement>singleton(null));
    fail();
  } catch (NullPointerException e) {
    assertThat(e).hasMessage("type == null");
  }
  try {
    MessageElement.builder().addOneOf(null);
    fail();
  } catch (NullPointerException e) {
    assertThat(e).hasMessage("oneOf == null");
  }
  try {
    MessageElement.builder().addOneOfs(null);
    fail();
  } catch (NullPointerException e) {
    assertThat(e).hasMessage("oneOfs == null");
  }
  try {
    MessageElement.builder().addOneOfs(Collections.<OneOfElement>singleton(null));
    fail();
  } catch (NullPointerException e) {
    assertThat(e).hasMessage("oneOf == null");
  }
  // addExtensions is overloaded; the casts select the single-element and
  // collection overloads explicitly.
  try {
    MessageElement.builder().addExtensions((ExtensionsElement) null);
    fail();
  } catch (NullPointerException e) {
    assertThat(e).hasMessage("extensions == null");
  }
  try {
    MessageElement.builder().addExtensions((Collection<ExtensionsElement>) null);
    fail();
  } catch (NullPointerException e) {
    assertThat(e).hasMessage("extensions == null");
  }
  try {
    MessageElement.builder().addExtensions(Collections.<ExtensionsElement>singleton(null));
    fail();
  } catch (NullPointerException e) {
    assertThat(e).hasMessage("extensions == null");
  }
  try {
    MessageElement.builder().addOption(null);
    fail();
  } catch (NullPointerException e) {
    assertThat(e).hasMessage("option == null");
  }
  try {
    MessageElement.builder().addOptions(null);
    fail();
  } catch (NullPointerException e) {
    assertThat(e).hasMessage("options == null");
  }
  try {
    MessageElement.builder().addOptions(Collections.<OptionElement>singleton(null));
    fail();
  } catch (NullPointerException e) {
    assertThat(e).hasMessage("option == null");
  }
}
// An empty message renders as a single-line body with a trailing newline.
@Test public void emptyToSchema() {
  TypeElement message = MessageElement.builder().name("Message").build();
  assertThat(message.toSchema()).isEqualTo("message Message {}\n");
}
// A single field renders indented inside the message braces.
@Test public void simpleToSchema() {
  FieldElement name =
      FieldElement.builder().label(REQUIRED).type(STRING).name("name").tag(1).build();
  TypeElement message = MessageElement.builder().name("Message").addField(name).build();
  String expected = ""
      + "message Message {\n"
      + "  required string name = 1;\n"
      + "}\n";
  assertThat(message.toSchema()).isEqualTo(expected);
}
// addFields(Collection) appends every element of the collection.
@Test public void addMultipleFields() {
  FieldElement first =
      FieldElement.builder().label(REQUIRED).type(STRING).name("first_name").tag(1).build();
  FieldElement last =
      FieldElement.builder().label(REQUIRED).type(STRING).name("last_name").tag(2).build();
  MessageElement message = MessageElement.builder()
      .name("Message")
      .addFields(Arrays.asList(first, last))
      .build();
  assertThat(message.fields()).hasSize(2);
}
// Documentation renders as a leading "// ..." comment above the message.
@Test public void simpleWithDocumentationToSchema() {
  FieldElement name =
      FieldElement.builder().label(REQUIRED).type(STRING).name("name").tag(1).build();
  TypeElement message = MessageElement.builder()
      .name("Message")
      .documentation("Hello")
      .addField(name)
      .build();
  String expected = ""
      + "// Hello\n"
      + "message Message {\n"
      + "  required string name = 1;\n"
      + "}\n";
  assertThat(message.toSchema()).isEqualTo(expected);
}
// Options render first inside the body, separated from fields by a blank line.
@Test public void simpleWithOptionsToSchema() {
  FieldElement field = FieldElement.builder()
      .label(REQUIRED)
      .type(STRING)
      .name("name")
      .tag(1)
      .build();
  TypeElement element = MessageElement.builder()
      .name("Message")
      .addField(field)
      .addOption(OptionElement.create("kit", Kind.STRING, "kat"))
      .build();
  String expected = ""
      + "message Message {\n"
      + "  option kit = \"kat\";\n"
      + "\n"
      + "  required string name = 1;\n"
      + "}\n";
  assertThat(element.toSchema()).isEqualTo(expected);
}
// addOptions(Collection) appends every option in the collection.
@Test public void addMultipleOptions() {
  FieldElement name = FieldElement.builder()
      .label(REQUIRED)
      .type(STRING)
      .name("name")
      .tag(1)
      .build();
  MessageElement message = MessageElement.builder()
      .name("Message")
      .addField(name)
      .addOptions(Arrays.asList(
          OptionElement.create("kit", Kind.STRING, "kat"),
          OptionElement.create("foo", Kind.STRING, "bar")))
      .build();
  assertThat(message.options()).hasSize(2);
}
// Nested types render after the fields, separated by a blank line and
// indented one extra level.
@Test public void simpleWithNestedElementsToSchema() {
  TypeElement element = MessageElement.builder()
      .name("Message")
      .addField(FieldElement.builder().label(REQUIRED).type(STRING).name("name").tag(1).build())
      .addType(MessageElement.builder()
          .name("Nested")
          .addField(
              FieldElement.builder().label(REQUIRED).type(STRING).name("name").tag(1).build())
          .build())
      .build();
  String expected = ""
      + "message Message {\n"
      + "  required string name = 1;\n"
      + "\n"
      + "  message Nested {\n"
      + "    required string name = 1;\n"
      + "  }\n"
      + "}\n";
  assertThat(element.toSchema()).isEqualTo(expected);
}
// addTypes(Collection) appends every nested type in the collection.
@Test public void addMultipleTypes() {
  FieldElement name = FieldElement.builder()
      .label(REQUIRED)
      .type(STRING)
      .name("name")
      .tag(1)
      .build();
  TypeElement first = MessageElement.builder().name("Nested1").build();
  TypeElement second = MessageElement.builder().name("Nested2").build();
  TypeElement message = MessageElement.builder()
      .name("Message")
      .addField(name)
      .addTypes(Arrays.asList(first, second))
      .build();
  assertThat(message.nestedElements()).hasSize(2);
}
  // Extension ranges appear after the fields, separated by a blank line.
  @Test public void simpleWithExtensionsToSchema() {
    TypeElement element = MessageElement.builder()
        .name("Message")
        .addField(FieldElement.builder()
            .label(REQUIRED)
            .type(STRING)
            .name("name")
            .tag(1)
            .build())
        .addExtensions(ExtensionsElement.create(500, 501))
        .build();
    String expected = ""
        + "message Message {\n"
        + "  required string name = 1;\n"
        + "\n"
        + "  extensions 500 to 501;\n"
        + "}\n";
    assertThat(element.toSchema()).isEqualTo(expected);
  }
@Test public void addMultipleExtensions() {
ExtensionsElement fives = ExtensionsElement.create(500, 501);
ExtensionsElement sixes = ExtensionsElement.create(600, 601);
MessageElement element = MessageElement.builder()
.name("Message")
.addField(FieldElement.builder()
.label(REQUIRED)
.type(STRING)
.name("name")
.tag(1)
.build())
.addExtensions(Arrays.asList(fives, sixes))
.build();
assertThat(element.extensions()).hasSize(2);
}
  // A oneof group renders as an indented block; its fields carry no label keyword.
  @Test public void oneOfToSchema() {
    TypeElement element = MessageElement.builder()
        .name("Message")
        .addOneOf(OneOfElement.builder()
            .name("hi")
            .addField(FieldElement.builder().label(ONE_OF).type(STRING).name("name").tag(1).build())
            .build())
        .build();
    String expected = ""
        + "message Message {\n"
        + "  oneof hi {\n"
        + "    string name = 1;\n"
        + "  }\n"
        + "}\n";
    assertThat(element.toSchema()).isEqualTo(expected);
  }
@Test public void addMultipleOneOfs() {
OneOfElement hi = OneOfElement.builder()
.name("hi")
.addField(FieldElement.builder().label(ONE_OF).type(STRING).name("name").tag(1).build())
.build();
OneOfElement hey = OneOfElement.builder()
.name("hey")
.addField(FieldElement.builder().label(ONE_OF).type(STRING).name("city").tag(2).build())
.build();
MessageElement element = MessageElement.builder()
.name("Message")
.addOneOfs(Arrays.asList(hi, hey))
.build();
assertThat(element.oneOfs()).hasSize(2);
}
  // End-to-end ordering check: options, fields, oneofs, extensions, then nested types.
  @Test public void multipleEverythingToSchema() {
    FieldElement field1 = FieldElement.builder()
        .label(REQUIRED)
        .type(STRING)
        .name("name")
        .tag(1)
        .build();
    FieldElement field2 = FieldElement.builder()
        .label(REQUIRED)
        .type(BOOL)
        .name("other_name")
        .tag(2)
        .build();
    FieldElement oneOf1Field = FieldElement.builder()
        .label(ONE_OF)
        .type(STRING)
        .name("namey")
        .tag(3)
        .build();
    OneOfElement oneOf1 = OneOfElement.builder()
        .name("thingy")
        .addField(oneOf1Field)
        .build();
    FieldElement oneOf2Field = FieldElement.builder()
        .label(ONE_OF)
        .type(STRING)
        .name("namer")
        .tag(4)
        .build();
    OneOfElement oneOf2 = OneOfElement.builder()
        .name("thinger")
        .addField(oneOf2Field)
        .build();
    ExtensionsElement extensions1 = ExtensionsElement.create(500, 501);
    ExtensionsElement extensions2 = ExtensionsElement.create(503, 503);
    TypeElement nested = MessageElement.builder().name("Nested").addField(field1).build();
    OptionElement option = OptionElement.create("kit", Kind.STRING, "kat");
    TypeElement element = MessageElement.builder()
        .name("Message")
        .addField(field1)
        .addField(field2)
        .addOneOf(oneOf1)
        .addOneOf(oneOf2)
        .addType(nested)
        .addExtensions(extensions1)
        .addExtensions(extensions2)
        .addOption(option)
        .build();
    // Note: a single-value range (503, 503) prints without the "to" clause.
    String expected = ""
        + "message Message {\n"
        + "  option kit = \"kat\";\n"
        + "\n"
        + "  required string name = 1;\n"
        + "  required bool other_name = 2;\n"
        + "\n"
        + "  oneof thingy {\n"
        + "    string namey = 3;\n"
        + "  }\n"
        + "  oneof thinger {\n"
        + "    string namer = 4;\n"
        + "  }\n"
        + "\n"
        + "  extensions 500 to 501;\n"
        + "  extensions 503;\n"
        + "\n"
        + "  message Nested {\n"
        + "    required string name = 1;\n"
        + "  }\n"
        + "}\n";
    assertThat(element.toSchema()).isEqualTo(expected);
  }
  // A labeled field renders as "<label> <type> <name> = <tag>;".
  @Test public void fieldToSchema() {
    FieldElement field = FieldElement.builder()
        .label(REQUIRED)
        .type(STRING)
        .name("name")
        .tag(1)
        .build();
    String expected = "required string name = 1;\n";
    assertThat(field.toSchema()).isEqualTo(expected);
  }
  // A ONE_OF-labeled field renders with no label keyword in front of the type.
  @Test public void oneOfFieldToSchema() {
    FieldElement field = FieldElement.builder()
        .label(ONE_OF)
        .type(STRING)
        .name("name")
        .tag(1)
        .build();
    String expected = "string name = 1;\n";
    assertThat(field.toSchema()).isEqualTo(expected);
  }
  // Field documentation renders as a leading "//" comment line.
  @Test public void fieldWithDocumentationToSchema() {
    FieldElement field = FieldElement.builder()
        .label(REQUIRED)
        .type(STRING)
        .name("name")
        .tag(1)
        .documentation("Hello")
        .build();
    String expected = ""
        + "// Hello\n"
        + "required string name = 1;\n";
    assertThat(field.toSchema()).isEqualTo(expected);
  }
  // Field options render in a bracketed list after the tag.
  @Test public void fieldWithOptionsToSchema() {
    FieldElement field = FieldElement.builder()
        .label(REQUIRED)
        .type(STRING)
        .name("name")
        .tag(1)
        .addOption(OptionElement.create("kit", Kind.STRING, "kat"))
        .build();
    String expected = "required string name = 1 [\n"
        + "  kit = \"kat\"\n"
        + "];\n";
    assertThat(field.toSchema()).isEqualTo(expected);
  }
  // Two direct fields sharing tag 1 must be rejected at build() time.
  @Test public void duplicateTagValueThrows() {
    FieldElement field1 = FieldElement.builder()
        .label(REQUIRED)
        .type(STRING)
        .name("name1")
        .tag(1)
        .build();
    FieldElement field2 = FieldElement.builder()
        .label(REQUIRED)
        .type(STRING)
        .name("name2")
        .tag(1)
        .build();
    try {
      MessageElement.builder()
          .name("Message")
          .qualifiedName("example.Message")
          .addField(field1)
          .addField(field2)
          .build();
      fail();
    } catch (IllegalStateException e) {
      assertThat(e).hasMessage("Duplicate tag 1 in example.Message");
    }
  }
  // Tag uniqueness is enforced across direct fields AND fields inside oneof groups.
  @Test public void duplicateTagValueOneOfThrows() {
    FieldElement field1 = FieldElement.builder()
        .label(REQUIRED)
        .type(STRING)
        .name("name1")
        .tag(1)
        .build();
    FieldElement field2 = FieldElement.builder()
        .label(ONE_OF)
        .type(STRING)
        .name("name2")
        .tag(1)
        .build();
    OneOfElement oneOf = OneOfElement.builder().name("name3").addField(field2).build();
    try {
      MessageElement.builder()
          .name("Message")
          .qualifiedName("example.Message")
          .addField(field1)
          .addOneOf(oneOf)
          .build();
      fail();
    } catch (IllegalStateException e) {
      assertThat(e).hasMessage("Duplicate tag 1 in example.Message");
    }
  }
  // A ONE_OF-labeled field added directly (outside an actual oneof group) is rejected.
  @Test public void oneOfFieldDisallowed() {
    FieldElement field1 = FieldElement.builder()
        .label(ONE_OF)
        .type(STRING)
        .name("name")
        .tag(1)
        .build();
    try {
      MessageElement.builder()
          .name("Message")
          .qualifiedName("example.Message")
          .addField(field1)
          .build();
      fail();
    } catch (IllegalStateException e) {
      assertThat(e).hasMessage("Field 'name' in example.Message improperly declares itself a member of a 'oneof' group but is not.");
    }
  }
  // Enum constants follow C++ scoping: constants of sibling nested enums share the
  // enclosing message's namespace, so the same constant name twice must throw.
  @Test public void duplicateEnumValueTagInScopeThrows() {
    EnumConstantElement value = EnumConstantElement.builder().name("VALUE").tag(1).build();
    TypeElement enum1 = EnumElement.builder()
        .name("Enum1")
        .qualifiedName("example.Enum1")
        .addConstant(value)
        .build();
    TypeElement enum2 = EnumElement.builder()
        .name("Enum2")
        .qualifiedName("example.Enum2")
        .addConstant(value)
        .build();
    try {
      MessageElement.builder()
          .name("Message")
          .qualifiedName("example.Message")
          .addType(enum1)
          .addType(enum2)
          .build();
      fail();
    } catch (IllegalStateException e) {
      assertThat(e).hasMessage("Duplicate enum constant VALUE in scope example.Message");
    }
  }
@Test public void deprecatedTrue() {
FieldElement field = FieldElement.builder()
.label(REQUIRED)
.type(STRING)
.name("name1")
.tag(1)
.addOption(OptionElement.create("deprecated", Kind.BOOLEAN, "true"))
.build();
assertThat(field.isDeprecated()).isTrue();
}
@Test public void deprecatedFalse() {
FieldElement field = FieldElement.builder()
.label(REQUIRED)
.type(STRING)
.name("name1")
.tag(1)
.addOption(OptionElement.create("deprecated", Kind.BOOLEAN, "false"))
.build();
assertThat(field.isDeprecated()).isFalse();
}
@Test public void deprecatedMissing() {
FieldElement field = FieldElement.builder()
.label(REQUIRED)
.type(STRING)
.name("name1")
.tag(1)
.build();
assertThat(field.isDeprecated()).isFalse();
}
@Test public void packedTrue() {
FieldElement field = FieldElement.builder()
.label(REQUIRED)
.type(STRING)
.name("name1")
.tag(1)
.addOption(OptionElement.create("packed", Kind.BOOLEAN, "true"))
.build();
assertThat(field.isPacked()).isTrue();
}
@Test public void packedFalse() {
FieldElement field = FieldElement.builder()
.label(REQUIRED)
.type(STRING)
.name("name1")
.tag(1)
.addOption(OptionElement.create("packed", Kind.BOOLEAN, "false"))
.build();
assertThat(field.isPacked()).isFalse();
}
@Test public void packedMissing() {
FieldElement field = FieldElement.builder()
.label(REQUIRED)
.type(STRING)
.name("name1")
.tag(1)
.build();
assertThat(field.isPacked()).isFalse();
}
@Test public void defaultValue() {
FieldElement field = FieldElement.builder()
.label(REQUIRED)
.type(STRING)
.name("name1")
.tag(1)
.addOption(OptionElement.create("default", Kind.STRING, "foo"))
.build();
assertThat(field.getDefault().value()).isEqualTo("foo");
}
@Test public void defaultMissing() {
FieldElement field = FieldElement.builder()
.label(REQUIRED)
.type(STRING)
.name("name1")
.tag(1)
.build();
assertThat(field.getDefault()).isNull();
}
}
| 2,857 |
0 | Create_ds/protoparser/src/test/java/com/squareup | Create_ds/protoparser/src/test/java/com/squareup/protoparser/RpcElementTest.java | package com.squareup.protoparser;
import com.squareup.protoparser.DataType.NamedType;
import java.util.Collections;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.fail;
// Verifies that RpcElement.Builder fails fast, with descriptive NPE messages, on
// missing required properties and on null arguments to every builder method.
public final class RpcElementTest {
  // build() must reject a missing name eagerly.
  @Test public void nameRequired() {
    try {
      RpcElement.builder()
          .requestType(NamedType.create("Foo"))
          .responseType(NamedType.create("Bar"))
          .build();
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("name == null");
    }
  }
  // build() must reject a missing request type eagerly.
  @Test public void requestTypeRequired() {
    try {
      RpcElement.builder().name("Test").responseType(NamedType.create("Bar")).build();
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("requestType == null");
    }
  }
  // build() must reject a missing response type eagerly.
  @Test public void responseTypeRequired() {
    try {
      RpcElement.builder().name("Test").requestType(NamedType.create("Foo")).build();
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("responseType == null");
    }
  }
  // Every setter/adder rejects null, including a null element inside a collection.
  @Test public void nullBuilderValuesThrow() {
    try {
      RpcElement.builder().name(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("name == null");
    }
    try {
      RpcElement.builder().requestType(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("requestType == null");
    }
    try {
      RpcElement.builder().responseType(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("responseType == null");
    }
    try {
      RpcElement.builder().documentation(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("documentation == null");
    }
    try {
      RpcElement.builder().addOption(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("option == null");
    }
    try {
      RpcElement.builder().addOptions(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("options == null");
    }
    try {
      RpcElement.builder().addOptions(Collections.<OptionElement>singleton(null));
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("option == null");
    }
  }
}
| 2,858 |
0 | Create_ds/protoparser/src/test/java/com/squareup | Create_ds/protoparser/src/test/java/com/squareup/protoparser/ServiceElementTest.java | package com.squareup.protoparser;
import com.squareup.protoparser.DataType.NamedType;
import com.squareup.protoparser.OptionElement.Kind;
import java.util.Arrays;
import java.util.Collections;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.fail;
// Verifies ServiceElement builder validation and .proto schema rendering.
// Made final for consistency with the sibling test classes (RpcElementTest,
// OneOfElementTest), which are declared final; test classes are never subclassed.
public final class ServiceElementTest {
  // build() must reject a missing name eagerly.
  @Test public void nameRequired() {
    try {
      ServiceElement.builder().qualifiedName("Test").build();
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("name == null");
    }
  }
  // Setting name alone also populates qualifiedName with the same value.
  @Test public void nameSetsQualifiedName() {
    ServiceElement test = ServiceElement.builder().name("Test").build();
    assertThat(test.name()).isEqualTo("Test");
    assertThat(test.qualifiedName()).isEqualTo("Test");
  }
  // Every setter/adder rejects null, including a null element inside a collection.
  @Test public void nullBuilderValuesThrow() {
    try {
      ServiceElement.builder().name(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("name == null");
    }
    try {
      ServiceElement.builder().qualifiedName(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("qualifiedName == null");
    }
    try {
      ServiceElement.builder().documentation(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("documentation == null");
    }
    try {
      ServiceElement.builder().addRpc(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("rpc == null");
    }
    try {
      ServiceElement.builder().addRpcs(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("rpcs == null");
    }
    try {
      ServiceElement.builder().addRpcs(Collections.<RpcElement>singleton(null));
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("rpc == null");
    }
    try {
      ServiceElement.builder().addOption(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("option == null");
    }
    try {
      ServiceElement.builder().addOptions(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("options == null");
    }
    try {
      ServiceElement.builder().addOptions(Collections.<OptionElement>singleton(null));
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("option == null");
    }
  }
  // A service with no rpcs and no options renders as an empty block.
  @Test public void emptyToSchema() {
    ServiceElement service = ServiceElement.builder().name("Service").build();
    String expected = "service Service {}\n";
    assertThat(service.toSchema()).isEqualTo(expected);
  }
  @Test public void singleToSchema() {
    ServiceElement service = ServiceElement.builder()
        .name("Service")
        .addRpc(RpcElement.builder()
            .name("Name")
            .requestType(NamedType.create("RequestType"))
            .responseType(NamedType.create("ResponseType"))
            .build())
        .build();
    String expected = ""
        + "service Service {\n"
        + "  rpc Name (RequestType) returns (ResponseType);\n"
        + "}\n";
    assertThat(service.toSchema()).isEqualTo(expected);
  }
  @Test public void addMultipleRpcs() {
    RpcElement firstName = RpcElement.builder()
        .name("FirstName")
        .requestType(NamedType.create("RequestType"))
        .responseType(NamedType.create("ResponseType"))
        .build();
    RpcElement lastName = RpcElement.builder()
        .name("LastName")
        .requestType(NamedType.create("RequestType"))
        .responseType(NamedType.create("ResponseType"))
        .build();
    ServiceElement service = ServiceElement.builder()
        .name("Service")
        .addRpcs(Arrays.asList(firstName, lastName))
        .build();
    assertThat(service.rpcs()).hasSize(2);
  }
  // Service-level options render before the rpcs, separated by a blank line.
  @Test public void singleWithOptionsToSchema() {
    ServiceElement service = ServiceElement.builder()
        .name("Service")
        .addOption(OptionElement.create("foo", Kind.STRING, "bar"))
        .addRpc(RpcElement.builder()
            .name("Name")
            .requestType(NamedType.create("RequestType"))
            .responseType(NamedType.create("ResponseType"))
            .build())
        .build();
    String expected = ""
        + "service Service {\n"
        + "  option foo = \"bar\";\n"
        + "\n"
        + "  rpc Name (RequestType) returns (ResponseType);\n"
        + "}\n";
    assertThat(service.toSchema()).isEqualTo(expected);
  }
  @Test public void addMultipleOptions() {
    OptionElement kitKat = OptionElement.create("kit", Kind.STRING, "kat");
    OptionElement fooBar = OptionElement.create("foo", Kind.STRING, "bar");
    ServiceElement service = ServiceElement.builder()
        .name("Service")
        .addOptions(Arrays.asList(kitKat, fooBar))
        .addRpc(RpcElement.builder()
            .name("Name")
            .requestType(NamedType.create("RequestType"))
            .responseType(NamedType.create("ResponseType"))
            .build())
        .build();
    assertThat(service.options()).hasSize(2);
  }
  // Service documentation renders as a leading "//" comment line.
  @Test public void singleWithDocumentationToSchema() {
    ServiceElement service = ServiceElement.builder()
        .name("Service")
        .documentation("Hello")
        .addRpc(RpcElement.builder()
            .name("Name")
            .requestType(NamedType.create("RequestType"))
            .responseType(NamedType.create("ResponseType"))
            .build())
        .build();
    String expected = ""
        + "// Hello\n"
        + "service Service {\n"
        + "  rpc Name (RequestType) returns (ResponseType);\n"
        + "}\n";
    assertThat(service.toSchema()).isEqualTo(expected);
  }
  // Duplicate rpcs are not deduplicated; both render.
  @Test public void multipleToSchema() {
    RpcElement rpc = RpcElement.builder()
        .name("Name")
        .requestType(NamedType.create("RequestType"))
        .responseType(NamedType.create("ResponseType"))
        .build();
    ServiceElement service =
        ServiceElement.builder().name("Service").addRpc(rpc).addRpc(rpc).build();
    String expected = ""
        + "service Service {\n"
        + "  rpc Name (RequestType) returns (ResponseType);\n"
        + "  rpc Name (RequestType) returns (ResponseType);\n"
        + "}\n";
    assertThat(service.toSchema()).isEqualTo(expected);
  }
  @Test public void rpcToSchema() {
    RpcElement rpc = RpcElement.builder()
        .name("Name")
        .requestType(NamedType.create("RequestType"))
        .responseType(NamedType.create("ResponseType"))
        .build();
    String expected = "rpc Name (RequestType) returns (ResponseType);\n";
    assertThat(rpc.toSchema()).isEqualTo(expected);
  }
  @Test public void rpcWithDocumentationToSchema() {
    RpcElement rpc = RpcElement.builder()
        .name("Name")
        .documentation("Hello")
        .requestType(NamedType.create("RequestType"))
        .responseType(NamedType.create("ResponseType"))
        .build();
    String expected = ""
        + "// Hello\n"
        + "rpc Name (RequestType) returns (ResponseType);\n";
    assertThat(rpc.toSchema()).isEqualTo(expected);
  }
  // An rpc with options renders them in a braced body terminated by "};".
  @Test public void rpcWithOptionsToSchema() {
    RpcElement rpc = RpcElement.builder()
        .name("Name")
        .requestType(NamedType.create("RequestType"))
        .responseType(NamedType.create("ResponseType"))
        .addOption(OptionElement.create("foo", Kind.STRING, "bar"))
        .build();
    String expected = ""
        + "rpc Name (RequestType) returns (ResponseType) {\n"
        + "  option foo = \"bar\";\n"
        + "};\n";
    assertThat(rpc.toSchema()).isEqualTo(expected);
  }
}
| 2,859 |
0 | Create_ds/protoparser/src/test/java/com/squareup | Create_ds/protoparser/src/test/java/com/squareup/protoparser/OneOfElementTest.java | package com.squareup.protoparser;
import java.util.Collections;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.fail;
// Verifies that OneOfElement.Builder fails fast, with descriptive NPE messages,
// on a missing name and on null arguments to every builder method.
public final class OneOfElementTest {
  // build() must reject a missing name eagerly.
  @Test public void nameRequired() {
    try {
      OneOfElement.builder().build();
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("name == null");
    }
  }
  // Every setter/adder rejects null, including a null element inside a collection.
  @Test public void nullBuilderValuesThrow() {
    try {
      OneOfElement.builder().name(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("name == null");
    }
    try {
      OneOfElement.builder().documentation(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("documentation == null");
    }
    try {
      OneOfElement.builder().addField(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("field == null");
    }
    try {
      OneOfElement.builder().addFields(null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("fields == null");
    }
    try {
      OneOfElement.builder().addFields(Collections.<FieldElement>singleton(null));
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("field == null");
    }
  }
}
| 2,860 |
0 | Create_ds/protoparser/src/test/java/com/squareup | Create_ds/protoparser/src/test/java/com/squareup/protoparser/ExtensionsElementTest.java | package com.squareup.protoparser;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.fail;
// Verifies ExtensionsElement range validation and .proto schema rendering.
// Made final for consistency with the sibling test classes (RpcElementTest,
// OneOfElementTest), which are declared final; test classes are never subclassed.
public final class ExtensionsElementTest {
  // Tag bounds outside the valid proto tag range are rejected with a clear message.
  @Test public void invalidTagRangeThrows() {
    try {
      ExtensionsElement.create(Integer.MIN_VALUE, 500);
      fail();
    } catch (IllegalArgumentException e) {
      assertThat(e).hasMessage("Invalid start value: -2147483648");
    }
    try {
      ExtensionsElement.create(500, Integer.MAX_VALUE);
      fail();
    } catch (IllegalArgumentException e) {
      assertThat(e).hasMessage("Invalid end value: 2147483647");
    }
  }
  // A degenerate range (start == end) renders without the "to" clause.
  @Test public void singleValueToSchema() {
    ExtensionsElement actual = ExtensionsElement.create(500, 500);
    String expected = "extensions 500;\n";
    assertThat(actual.toSchema()).isEqualTo(expected);
  }
  @Test public void rangeToSchema() {
    ExtensionsElement actual = ExtensionsElement.create(500, 505);
    String expected = "extensions 500 to 505;\n";
    assertThat(actual.toSchema()).isEqualTo(expected);
  }
  // A range ending at the proto maximum renders as "to max".
  @Test public void maxRangeToSchema() {
    ExtensionsElement actual = ExtensionsElement.create(500, ProtoFile.MAX_TAG_VALUE);
    String expected = "extensions 500 to max;\n";
    assertThat(actual.toSchema()).isEqualTo(expected);
  }
  // Documentation renders as a leading "//" comment line.
  @Test public void withDocumentationToSchema() {
    ExtensionsElement actual = ExtensionsElement.create(500, 500, "Hello");
    String expected = ""
        + "// Hello\n"
        + "extensions 500;\n";
    assertThat(actual.toSchema()).isEqualTo(expected);
  }
}
| 2,861 |
0 | Create_ds/protoparser/src/main/java/com/squareup | Create_ds/protoparser/src/main/java/com/squareup/protoparser/EnumElement.java | // Copyright 2013 Square, Inc.
package com.squareup.protoparser;
import com.google.auto.value.AutoValue;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import static com.squareup.protoparser.Utils.appendDocumentation;
import static com.squareup.protoparser.Utils.appendIndented;
import static com.squareup.protoparser.Utils.checkNotNull;
import static com.squareup.protoparser.Utils.immutableCopyOf;
/** An enumerated type declaration. */
@AutoValue
public abstract class EnumElement implements TypeElement {
  // Rejects duplicate constant tags. Only invoked when allow_alias is not enabled
  // (see build()), since allow_alias legalizes tag aliasing.
  private static void validateTagUniqueness(String qualifiedName,
      List<EnumConstantElement> constants) {
    checkNotNull(qualifiedName, "qualifiedName");
    Set<Integer> tags = new LinkedHashSet<>();
    for (EnumConstantElement constant : constants) {
      int tag = constant.tag();
      if (!tags.add(tag)) {
        throw new IllegalStateException("Duplicate tag " + tag + " in " + qualifiedName);
      }
    }
  }
  // True when the enum carries `option allow_alias` with the string value "true".
  private static boolean parseAllowAlias(List<OptionElement> options) {
    OptionElement option = OptionElement.findByName(options, "allow_alias");
    return option != null && "true".equals(option.value());
  }
  /**
   * Though not mentioned in the spec, enum names use C++ scoping rules, meaning that enum constants
   * are siblings of their declaring element, not children of it.
   *
   * <p>Throws {@link IllegalStateException} if two enum constants within
   * {@code nestedElements} (across different enums) share a name.
   */
  static void validateValueUniquenessInScope(String qualifiedName,
      List<TypeElement> nestedElements) {
    Set<String> names = new LinkedHashSet<>();
    for (TypeElement nestedElement : nestedElements) {
      if (nestedElement instanceof EnumElement) {
        EnumElement enumElement = (EnumElement) nestedElement;
        for (EnumConstantElement constant : enumElement.constants()) {
          String name = constant.name();
          if (!names.add(name)) {
            throw new IllegalStateException(
                "Duplicate enum constant " + name + " in scope " + qualifiedName);
          }
        }
      }
    }
  }
  public static Builder builder() {
    return new Builder();
  }
  EnumElement() {
  }
  @Override public abstract String name();
  @Override public abstract String qualifiedName();
  @Override public abstract String documentation();
  public abstract List<EnumConstantElement> constants();
  @Override public abstract List<OptionElement> options();
  @Override public final List<TypeElement> nestedElements() {
    return Collections.emptyList(); // Enums do not allow nested type declarations.
  }
  /** Renders this enum to .proto schema syntax: options first, then constants. */
  @Override public final String toSchema() {
    StringBuilder builder = new StringBuilder();
    appendDocumentation(builder, documentation());
    builder.append("enum ")
        .append(name())
        .append(" {");
    if (!options().isEmpty()) {
      builder.append('\n');
      for (OptionElement option : options()) {
        appendIndented(builder, option.toSchemaDeclaration());
      }
    }
    if (!constants().isEmpty()) {
      builder.append('\n');
      for (EnumConstantElement constant : constants()) {
        appendIndented(builder, constant.toSchema());
      }
    }
    return builder.append("}\n").toString();
  }
  /** Builds {@link EnumElement}s; all methods reject null arguments. */
  public static final class Builder {
    private String name;
    private String qualifiedName;
    private String documentation = "";
    private final List<EnumConstantElement> constants = new ArrayList<>();
    private final List<OptionElement> options = new ArrayList<>();
    private Builder() {
    }
    // Also initializes qualifiedName to the same value unless it was set already.
    public Builder name(String name) {
      this.name = checkNotNull(name, "name");
      if (qualifiedName == null) {
        qualifiedName = name;
      }
      return this;
    }
    public Builder qualifiedName(String qualifiedName) {
      this.qualifiedName = checkNotNull(qualifiedName, "qualifiedName");
      return this;
    }
    public Builder documentation(String documentation) {
      this.documentation = checkNotNull(documentation, "documentation");
      return this;
    }
    public Builder addConstant(EnumConstantElement constant) {
      constants.add(checkNotNull(constant, "constant"));
      return this;
    }
    public Builder addConstants(Collection<EnumConstantElement> constants) {
      for (EnumConstantElement constant : checkNotNull(constants, "constants")) {
        addConstant(constant);
      }
      return this;
    }
    public Builder addOption(OptionElement option) {
      options.add(checkNotNull(option, "option"));
      return this;
    }
    public Builder addOptions(Collection<OptionElement> options) {
      for (OptionElement option : checkNotNull(options, "options")) {
        addOption(option);
      }
      return this;
    }
    // Validates tag uniqueness at build time unless allow_alias is enabled.
    public EnumElement build() {
      checkNotNull(name, "name");
      checkNotNull(qualifiedName, "qualifiedName");
      if (!parseAllowAlias(options)) {
        validateTagUniqueness(qualifiedName, constants);
      }
      return new AutoValue_EnumElement(name, qualifiedName, documentation,
          immutableCopyOf(constants), immutableCopyOf(options));
    }
  }
}
| 2,862 |
0 | Create_ds/protoparser/src/main/java/com/squareup | Create_ds/protoparser/src/main/java/com/squareup/protoparser/ServiceElement.java | // Copyright 2013 Square, Inc.
package com.squareup.protoparser;
import com.google.auto.value.AutoValue;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import static com.squareup.protoparser.Utils.appendDocumentation;
import static com.squareup.protoparser.Utils.appendIndented;
import static com.squareup.protoparser.Utils.checkNotNull;
import static com.squareup.protoparser.Utils.immutableCopyOf;
/** A service declaration, holding rpc definitions and service-level options. */
@AutoValue
public abstract class ServiceElement {
  public static Builder builder() {
    return new Builder();
  }
  public abstract String name();
  /** Fully-qualified name; defaults to {@link #name()} when not set explicitly. */
  public abstract String qualifiedName();
  public abstract String documentation();
  public abstract List<RpcElement> rpcs();
  public abstract List<OptionElement> options();
  ServiceElement() {
  }
  /** Renders this service to .proto schema syntax: options first, then rpcs. */
  public final String toSchema() {
    StringBuilder builder = new StringBuilder();
    appendDocumentation(builder, documentation());
    builder.append("service ")
        .append(name())
        .append(" {");
    if (!options().isEmpty()) {
      builder.append('\n');
      for (OptionElement option : options()) {
        appendIndented(builder, option.toSchemaDeclaration());
      }
    }
    if (!rpcs().isEmpty()) {
      builder.append('\n');
      for (RpcElement rpc : rpcs()) {
        appendIndented(builder, rpc.toSchema());
      }
    }
    return builder.append("}\n").toString();
  }
  /** Builds {@link ServiceElement}s; all methods reject null arguments. */
  public static final class Builder {
    private String name;
    private String qualifiedName;
    private String documentation = "";
    private final List<OptionElement> options = new ArrayList<>();
    private final List<RpcElement> rpcs = new ArrayList<>();
    private Builder() {
    }
    // Also initializes qualifiedName to the same value unless it was set already.
    public Builder name(String name) {
      this.name = checkNotNull(name, "name");
      if (qualifiedName == null) {
        qualifiedName = name;
      }
      return this;
    }
    public Builder qualifiedName(String qualifiedName) {
      this.qualifiedName = checkNotNull(qualifiedName, "qualifiedName");
      return this;
    }
    public Builder documentation(String documentation) {
      this.documentation = checkNotNull(documentation, "documentation");
      return this;
    }
    public Builder addRpc(RpcElement rpc) {
      rpcs.add(checkNotNull(rpc, "rpc"));
      return this;
    }
    public Builder addRpcs(Collection<RpcElement> rpcs) {
      for (RpcElement rpc : checkNotNull(rpcs, "rpcs")) {
        addRpc(rpc);
      }
      return this;
    }
    public Builder addOption(OptionElement option) {
      options.add(checkNotNull(option, "option"));
      return this;
    }
    public Builder addOptions(Collection<OptionElement> options) {
      for (OptionElement option : checkNotNull(options, "options")) {
        addOption(option);
      }
      return this;
    }
    public ServiceElement build() {
      checkNotNull(name, "name");
      checkNotNull(qualifiedName, "qualifiedName");
      return new AutoValue_ServiceElement(name, qualifiedName, documentation, immutableCopyOf(rpcs),
          immutableCopyOf(options));
    }
  }
}
| 2,863 |
0 | Create_ds/protoparser/src/main/java/com/squareup | Create_ds/protoparser/src/main/java/com/squareup/protoparser/TypeElement.java | // Copyright 2013 Square, Inc.
package com.squareup.protoparser;
import java.util.List;
/** A message type or enum type declaration. */
public interface TypeElement {
  /** Simple (unqualified) name of this type. */
  String name();
  /** Fully-qualified name, including enclosing package/parent types. */
  String qualifiedName();
  /** Leading documentation text; empty string when absent. */
  String documentation();
  /** Options declared directly on this type. */
  List<OptionElement> options();
  /** Types declared inside this one; empty for enums (see EnumElement). */
  List<TypeElement> nestedElements();
  /** Renders this type back to .proto schema syntax. */
  String toSchema();
}
| 2,864 |
0 | Create_ds/protoparser/src/main/java/com/squareup | Create_ds/protoparser/src/main/java/com/squareup/protoparser/DataType.java | // Copyright 2015 Square, Inc.
package com.squareup.protoparser;
import java.util.Locale;
import static com.squareup.protoparser.Utils.checkNotNull;
/**
* Representation of a scalar, map, or named type. While this class is an interface, only the
* included implementations are supported.
*/
public interface DataType {
  /** Discriminator for the three supported implementations. */
  enum Kind {
    /** Type is a {@link ScalarType}. */
    SCALAR,
    /** Type is a {@link MapType}. */
    MAP,
    /** Type is a {@link NamedType}. */
    NAMED
  }
  /** The kind of this type (and therefore implementing class). */
  Kind kind();
  /** The built-in proto scalar types; toString() yields the lowercase keyword. */
  enum ScalarType implements DataType {
    ANY,
    BOOL,
    BYTES,
    DOUBLE,
    FLOAT,
    FIXED32,
    FIXED64,
    INT32,
    INT64,
    SFIXED32,
    SFIXED64,
    SINT32,
    SINT64,
    STRING,
    UINT32,
    UINT64;
    @Override public Kind kind() {
      return Kind.SCALAR;
    }
    @Override public String toString() {
      // Locale.US keeps the keyword stable regardless of the default locale.
      return name().toLowerCase(Locale.US);
    }
  }
  /** A proto {@code map<K, V>} type; immutable, value-equal on both component types. */
  final class MapType implements DataType {
    public static MapType create(DataType keyType, DataType valueType) {
      return new MapType(checkNotNull(keyType, "keyType"), checkNotNull(valueType, "valueType"));
    }
    private final DataType keyType;
    private final DataType valueType;
    private MapType(DataType keyType, DataType valueType) {
      this.keyType = keyType;
      this.valueType = valueType;
    }
    @Override public Kind kind() {
      return Kind.MAP;
    }
    public DataType keyType() {
      return keyType;
    }
    public DataType valueType() {
      return valueType;
    }
    @Override public String toString() {
      return "map<" + keyType + ", " + valueType + ">";
    }
    @Override public boolean equals(Object obj) {
      if (obj == this) return true;
      if (!(obj instanceof MapType)) return false;
      MapType other = (MapType) obj;
      return keyType.equals(other.keyType) && valueType.equals(other.valueType);
    }
    @Override public int hashCode() {
      // Combines both component hashes; consistent with equals above.
      return keyType.hashCode() * 37 + valueType.hashCode();
    }
  }
  /** A reference to a message/enum type by (possibly qualified) name. */
  final class NamedType implements DataType {
    public static NamedType create(String name) {
      return new NamedType(checkNotNull(name, "name"));
    }
    private final String name;
    private NamedType(String name) {
      this.name = name;
    }
    public String name() {
      return name;
    }
    @Override public Kind kind() {
      return Kind.NAMED;
    }
    @Override public String toString() {
      return name;
    }
    @Override public boolean equals(Object obj) {
      if (obj == this) return true;
      if (!(obj instanceof NamedType)) return false;
      NamedType other = (NamedType) obj;
      return name.equals(other.name);
    }
    @Override public int hashCode() {
      return name.hashCode();
    }
  }
}
| 2,865 |
0 | Create_ds/protoparser/src/main/java/com/squareup | Create_ds/protoparser/src/main/java/com/squareup/protoparser/RpcElement.java | // Copyright 2014 Square, Inc.
package com.squareup.protoparser;
import com.google.auto.value.AutoValue;
import com.squareup.protoparser.DataType.NamedType;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import static com.squareup.protoparser.Utils.appendDocumentation;
import static com.squareup.protoparser.Utils.appendIndented;
import static com.squareup.protoparser.Utils.checkNotNull;
import static com.squareup.protoparser.Utils.immutableCopyOf;
/** An rpc declaration inside a service: name, request/response types, options. */
@AutoValue
public abstract class RpcElement {
  public static Builder builder() {
    return new Builder();
  }
  RpcElement() {
  }
  public abstract String name();
  public abstract String documentation();
  public abstract NamedType requestType();
  public abstract NamedType responseType();
  public abstract List<OptionElement> options();
  /**
   * Renders this rpc to .proto schema syntax. With no options it ends in ";";
   * with options it renders a braced body terminated by "};".
   */
  public final String toSchema() {
    StringBuilder builder = new StringBuilder();
    appendDocumentation(builder, documentation());
    builder.append("rpc ")
        .append(name())
        .append(" (")
        .append(requestType())
        .append(") returns (")
        .append(responseType())
        .append(')');
    if (!options().isEmpty()) {
      builder.append(" {\n");
      for (OptionElement option : options()) {
        appendIndented(builder, option.toSchemaDeclaration());
      }
      builder.append("}");
    }
    return builder.append(";\n").toString();
  }
  /** Builds {@link RpcElement}s; all methods reject null arguments. */
  public static final class Builder {
    private String name;
    private String documentation = "";
    private NamedType requestType;
    private NamedType responseType;
    private final List<OptionElement> options = new ArrayList<>();
    private Builder() {
    }
    public Builder name(String name) {
      this.name = checkNotNull(name, "name");
      return this;
    }
    public Builder documentation(String documentation) {
      this.documentation = checkNotNull(documentation, "documentation");
      return this;
    }
    public Builder requestType(NamedType requestType) {
      this.requestType = checkNotNull(requestType, "requestType");
      return this;
    }
    public Builder responseType(NamedType responseType) {
      this.responseType = checkNotNull(responseType, "responseType");
      return this;
    }
    public Builder addOption(OptionElement option) {
      options.add(checkNotNull(option, "option"));
      return this;
    }
    public Builder addOptions(Collection<OptionElement> options) {
      for (OptionElement option : checkNotNull(options, "options")) {
        addOption(option);
      }
      return this;
    }
    // name, requestType, and responseType are all required; build() fails fast on each.
    public RpcElement build() {
      checkNotNull(name, "name");
      checkNotNull(requestType, "requestType");
      checkNotNull(responseType, "responseType");
      return new AutoValue_RpcElement(name, documentation, requestType, responseType,
          immutableCopyOf(options));
    }
  }
}
| 2,866 |
0 | Create_ds/protoparser/src/main/java/com/squareup | Create_ds/protoparser/src/main/java/com/squareup/protoparser/Utils.java | package com.squareup.protoparser;
import java.lang.annotation.Retention;
import java.util.ArrayList;
import java.util.List;
import static java.lang.annotation.RetentionPolicy.SOURCE;
import static java.util.Collections.unmodifiableList;
/** Shared static helpers for the element classes; not instantiable. */
final class Utils {
  /** Writes {@code documentation} as {@code // }-prefixed lines; no-op when empty. */
  static void appendDocumentation(StringBuilder builder, String documentation) {
    if (!documentation.isEmpty()) {
      for (String line : documentation.split("\n")) {
        builder.append("// ").append(line).append('\n');
      }
    }
  }

  /** Appends {@code value} with every line indented by two spaces. */
  static void appendIndented(StringBuilder builder, String value) {
    for (String row : value.split("\n")) {
      builder.append("  ").append(row).append('\n');
    }
  }

  /** Returns an unmodifiable snapshot of {@code list}; later source mutations are not seen. */
  static <T> List<T> immutableCopyOf(List<T> list) {
    List<T> snapshot = new ArrayList<>(list);
    return unmodifiableList(snapshot);
  }

  /** Returns {@code value}, or throws NullPointerException "{name} == null" when null. */
  static <T> T checkNotNull(T value, String name) {
    if (value != null) {
      return value;
    }
    throw new NullPointerException(name + " == null");
  }

  /**
   * Throws IllegalArgumentException when {@code condition} is false. The message is
   * passed through {@link String#format} only when format args are supplied.
   */
  static void checkArgument(boolean condition, String message, Object... messageArgs) {
    if (condition) {
      return;
    }
    throw new IllegalArgumentException(
        messageArgs.length > 0 ? String.format(message, messageArgs) : message);
  }

  /** Source-retention nullability marker used within this package. */
  @Retention(SOURCE)
  @interface Nullable {
  }

  private Utils() {
    throw new AssertionError("No instances.");
  }
}
| 2,867 |
0 | Create_ds/protoparser/src/main/java/com/squareup | Create_ds/protoparser/src/main/java/com/squareup/protoparser/OptionElement.java | // Copyright 2013 Square, Inc.
package com.squareup.protoparser;
import com.google.auto.value.AutoValue;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import static com.squareup.protoparser.Utils.appendIndented;
import static com.squareup.protoparser.Utils.checkNotNull;
import static java.util.Collections.unmodifiableMap;
@AutoValue
public abstract class OptionElement {
  /** The runtime type of this option's {@link #value()}. */
  public enum Kind {
    STRING,
    BOOLEAN,
    NUMBER,
    ENUM,
    MAP,
    LIST,
    OPTION
  }

  /**
   * Flattens {@code options} into a name-to-value map. Values that are nested
   * {@link OptionElement}s or {@link Map}s and share a name are merged into a
   * single nested map. The returned map is unmodifiable.
   */
  @SuppressWarnings("unchecked")
  public static Map<String, Object> optionsAsMap(List<OptionElement> options) {
    Map<String, Object> map = new LinkedHashMap<>();
    for (OptionElement option : options) {
      String name = option.name();
      Object value = option.value();
      if (value instanceof String || value instanceof List) {
        map.put(name, value);
      } else if (value instanceof OptionElement) {
        Map<String, Object> newMap = optionsAsMap(Collections.singletonList((OptionElement) value));
        Object oldValue = map.get(name);
        if (oldValue instanceof Map) {
          Map<String, Object> oldMap = (Map<String, Object>) oldValue;
          // Existing nested maps are immutable. Make a mutable copy, update, and replace.
          oldMap = new LinkedHashMap<>(oldMap);
          oldMap.putAll(newMap);
          map.put(name, oldMap);
        } else {
          map.put(name, newMap);
        }
      } else if (value instanceof Map) {
        Object oldValue = map.get(name);
        if (oldValue instanceof Map) {
          // BUGFIX: the existing value may be an unmodifiable map (e.g. one produced
          // by the recursive optionsAsMap call in the branch above, which returns
          // unmodifiableMap). Mutating it in place would throw
          // UnsupportedOperationException, so merge into a mutable copy and replace,
          // mirroring the OptionElement branch.
          Map<String, Object> merged = new LinkedHashMap<>((Map<String, Object>) oldValue);
          merged.putAll((Map<String, Object>) value);
          map.put(name, merged);
        } else {
          map.put(name, value);
        }
      } else {
        throw new AssertionError("Option value must be String, Option, List, or Map<String, ?>");
      }
    }
    return unmodifiableMap(map);
  }

  /**
   * Return the option with the specified name from the supplied list or null.
   *
   * @throws IllegalStateException if more than one option matches {@code name}
   */
  public static OptionElement findByName(List<OptionElement> options, String name) {
    checkNotNull(options, "options");
    checkNotNull(name, "name");

    OptionElement found = null;
    for (OptionElement option : options) {
      if (option.name().equals(name)) {
        if (found != null) {
          throw new IllegalStateException("Multiple options match name: " + name);
        }
        found = option;
      }
    }
    return found;
  }

  /** Creates a non-parenthesized option. */
  public static OptionElement create(String name, Kind kind, Object value) {
    return create(name, kind, value, false);
  }

  /**
   * Creates an option. {@code isParenthesized} marks custom options, which are
   * rendered as {@code (name)} in schema output.
   */
  public static OptionElement create(String name, Kind kind, Object value,
      boolean isParenthesized) {
    checkNotNull(name, "name");
    checkNotNull(value, "value");
    return new AutoValue_OptionElement(name, kind, value, isParenthesized);
  }

  OptionElement() {
  }

  public abstract String name();
  public abstract Kind kind();
  public abstract Object value();
  public abstract boolean isParenthesized();

  /** Renders this option as schema text, without the leading {@code option} keyword. */
  public final String toSchema() {
    Object value = value();
    switch (kind()) {
      case STRING:
        return formatName() + " = \"" + value + '"';
      case BOOLEAN:
      case NUMBER:
      case ENUM:
        return formatName() + " = " + value;
      case OPTION: {
        StringBuilder builder = new StringBuilder();
        OptionElement optionValue = (OptionElement) value;
        // Treat nested options as non-parenthesized always, prevents double parentheses.
        optionValue =
            OptionElement.create(optionValue.name(), optionValue.kind(), optionValue.value());
        builder.append(formatName()).append('.').append(optionValue.toSchema());
        return builder.toString();
      }
      case MAP: {
        StringBuilder builder = new StringBuilder();
        builder.append(formatName()).append(" = {\n");
        //noinspection unchecked
        Map<String, ?> valueMap = (Map<String, ?>) value;
        formatOptionMap(builder, valueMap);
        builder.append('}');
        return builder.toString();
      }
      case LIST: {
        StringBuilder builder = new StringBuilder();
        builder.append(formatName()).append(" = [\n");
        //noinspection unchecked
        List<OptionElement> optionList = (List<OptionElement>) value;
        formatOptionList(builder, optionList);
        builder.append(']');
        return builder.toString();
      }
      default:
        throw new AssertionError();
    }
  }

  /** Renders this option as a full declaration, e.g. {@code option foo = true;\n}. */
  public final String toSchemaDeclaration() {
    return "option " + toSchema() + ";\n";
  }

  /** Appends each element's schema on its own indented line, comma-separated. */
  static void formatOptionList(StringBuilder builder, List<OptionElement> optionList) {
    for (int i = 0, count = optionList.size(); i < count; i++) {
      String endl = (i < count - 1) ? "," : "";
      appendIndented(builder, optionList.get(i).toSchema() + endl);
    }
  }

  /** Appends "key: value" lines for each map entry, comma-separated and indented. */
  static void formatOptionMap(StringBuilder builder, Map<String, ?> valueMap) {
    List<? extends Map.Entry<String, ?>> entries = new ArrayList<>(valueMap.entrySet());
    for (int i = 0, count = entries.size(); i < count; i++) {
      Map.Entry<String, ?> entry = entries.get(i);
      String endl = (i < count - 1) ? "," : "";
      appendIndented(builder,
          entry.getKey() + ": " + formatOptionMapValue(entry.getValue()) + endl);
    }
  }

  /** Formats a map value: quoted string, nested {...} map, [...] list, or raw toString. */
  static String formatOptionMapValue(Object value) {
    checkNotNull(value, "value == null");
    if (value instanceof String) {
      return "\"" + value + '"';
    }
    if (value instanceof Map) {
      StringBuilder builder = new StringBuilder().append("{\n");
      //noinspection unchecked
      Map<String, ?> map = (Map<String, ?>) value;
      formatOptionMap(builder, map);
      return builder.append('}').toString();
    }
    if (value instanceof List) {
      StringBuilder builder = new StringBuilder().append("[\n");
      List<?> list = (List<?>) value;
      for (int i = 0, count = list.size(); i < count; i++) {
        String endl = (i < count - 1) ? "," : "";
        appendIndented(builder, formatOptionMapValue(list.get(i)) + endl);
      }
      return builder.append("]").toString();
    }
    return value.toString();
  }

  /** Wraps custom option names in parentheses; plain names are returned as-is. */
  private String formatName() {
    if (isParenthesized()) {
      return '(' + name() + ')';
    } else {
      return name();
    }
  }
}
| 2,868 |
0 | Create_ds/protoparser/src/main/java/com/squareup | Create_ds/protoparser/src/main/java/com/squareup/protoparser/MessageElement.java | // Copyright 2013 Square, Inc.
package com.squareup.protoparser;
import com.google.auto.value.AutoValue;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import static com.squareup.protoparser.Utils.appendDocumentation;
import static com.squareup.protoparser.Utils.appendIndented;
import static com.squareup.protoparser.Utils.checkNotNull;
import static com.squareup.protoparser.Utils.immutableCopyOf;
@AutoValue
public abstract class MessageElement implements TypeElement {
  /**
   * Fails if any tag number is used more than once across {@code fields} and all
   * member fields of the {@code oneOfs}; proto tags must be unique per message.
   */
  static void validateFieldTagUniqueness(String qualifiedName, List<FieldElement> fields,
      List<OneOfElement> oneOfs) {
    List<FieldElement> allFields = new ArrayList<>(fields);
    for (OneOfElement oneOf : oneOfs) {
      allFields.addAll(oneOf.fields());
    }

    Set<Integer> tags = new LinkedHashSet<>();
    for (FieldElement field : allFields) {
      int tag = field.tag();
      if (!tags.add(tag)) {
        throw new IllegalStateException("Duplicate tag " + tag + " in " + qualifiedName);
      }
    }
  }

  /**
   * Fails if a directly declared field carries the ONE_OF label; such fields may
   * only appear inside a {@link OneOfElement}, never in the message's own list.
   */
  static void validateFieldLabel(String qualifiedName, List<FieldElement> fields) {
    for (FieldElement field : fields) {
      if (field.label() == FieldElement.Label.ONE_OF) {
        throw new IllegalStateException("Field '"
            + field.name()
            + "' in "
            + qualifiedName
            + " improperly declares itself a member of a 'oneof' group but is not.");
      }
    }
  }

  public static Builder builder() {
    return new Builder();
  }

  MessageElement() {
  }

  @Override public abstract String name();
  @Override public abstract String qualifiedName();
  @Override public abstract String documentation();
  public abstract List<FieldElement> fields();
  public abstract List<OneOfElement> oneOfs();
  @Override public abstract List<TypeElement> nestedElements();
  public abstract List<ExtensionsElement> extensions();
  @Override public abstract List<OptionElement> options();

  /**
   * Renders this message back to proto schema syntax: documentation, then
   * {@code message Name { ... }} containing options, fields, oneofs, extensions,
   * and nested types — in that fixed order.
   */
  @Override public final String toSchema() {
    StringBuilder builder = new StringBuilder();
    appendDocumentation(builder, documentation());
    builder.append("message ")
        .append(name())
        .append(" {");
    if (!options().isEmpty()) {
      builder.append('\n');
      for (OptionElement option : options()) {
        appendIndented(builder, option.toSchemaDeclaration());
      }
    }
    if (!fields().isEmpty()) {
      builder.append('\n');
      for (FieldElement field : fields()) {
        appendIndented(builder, field.toSchema());
      }
    }
    if (!oneOfs().isEmpty()) {
      builder.append('\n');
      for (OneOfElement oneOf : oneOfs()) {
        appendIndented(builder, oneOf.toSchema());
      }
    }
    if (!extensions().isEmpty()) {
      builder.append('\n');
      for (ExtensionsElement extension : extensions()) {
        appendIndented(builder, extension.toSchema());
      }
    }
    if (!nestedElements().isEmpty()) {
      builder.append('\n');
      for (TypeElement type : nestedElements()) {
        appendIndented(builder, type.toSchema());
      }
    }
    return builder.append("}\n").toString();
  }

  /** Builder for {@link MessageElement}; {@link #build()} validates tags and labels. */
  public static final class Builder {
    private String name;
    private String qualifiedName;
    private String documentation = "";
    private final List<FieldElement> fields = new ArrayList<>();
    private final List<OneOfElement> oneOfs = new ArrayList<>();
    private final List<TypeElement> nestedElements = new ArrayList<>();
    private final List<ExtensionsElement> extensions = new ArrayList<>();
    private final List<OptionElement> options = new ArrayList<>();

    private Builder() {
    }

    public Builder name(String name) {
      this.name = checkNotNull(name, "name");
      // Default the qualified name to the simple name until one is set explicitly.
      if (qualifiedName == null) {
        qualifiedName = name;
      }
      return this;
    }

    public Builder qualifiedName(String qualifiedName) {
      this.qualifiedName = checkNotNull(qualifiedName, "qualifiedName");
      return this;
    }

    public Builder documentation(String documentation) {
      this.documentation = checkNotNull(documentation, "documentation");
      return this;
    }

    public Builder addField(FieldElement field) {
      fields.add(checkNotNull(field, "field"));
      return this;
    }

    public Builder addFields(Collection<FieldElement> fields) {
      for (FieldElement field : checkNotNull(fields, "fields")) {
        addField(field);
      }
      return this;
    }

    public Builder addOneOf(OneOfElement oneOf) {
      oneOfs.add(checkNotNull(oneOf, "oneOf"));
      return this;
    }

    public Builder addOneOfs(Collection<OneOfElement> oneOfs) {
      for (OneOfElement oneOf : checkNotNull(oneOfs, "oneOfs")) {
        addOneOf(oneOf);
      }
      return this;
    }

    public Builder addType(TypeElement type) {
      nestedElements.add(checkNotNull(type, "type"));
      return this;
    }

    public Builder addTypes(Collection<TypeElement> types) {
      for (TypeElement type : checkNotNull(types, "types")) {
        addType(type);
      }
      return this;
    }

    public Builder addExtensions(ExtensionsElement extensions) {
      this.extensions.add(checkNotNull(extensions, "extensions"));
      return this;
    }

    public Builder addExtensions(Collection<ExtensionsElement> extensions) {
      for (ExtensionsElement extension : checkNotNull(extensions, "extensions")) {
        addExtensions(extension);
      }
      return this;
    }

    public Builder addOption(OptionElement option) {
      options.add(checkNotNull(option, "option"));
      return this;
    }

    public Builder addOptions(Collection<OptionElement> options) {
      for (OptionElement option : checkNotNull(options, "options")) {
        addOption(option);
      }
      return this;
    }

    public MessageElement build() {
      checkNotNull(name, "name");
      checkNotNull(qualifiedName, "qualifiedName");

      validateFieldTagUniqueness(qualifiedName, fields, oneOfs);
      validateFieldLabel(qualifiedName, fields);
      // NOTE(review): presumably validates enum-constant name uniqueness among the
      // nested types — defined in EnumElement, not visible here; confirm semantics.
      EnumElement.validateValueUniquenessInScope(qualifiedName, nestedElements);
      return new AutoValue_MessageElement(name, qualifiedName, documentation,
          immutableCopyOf(fields), immutableCopyOf(oneOfs), immutableCopyOf(nestedElements),
          immutableCopyOf(extensions), immutableCopyOf(options));
    }
  }
}
| 2,869 |
0 | Create_ds/protoparser/src/main/java/com/squareup | Create_ds/protoparser/src/main/java/com/squareup/protoparser/OneOfElement.java | // Copyright 2014 Square, Inc.
package com.squareup.protoparser;
import com.google.auto.value.AutoValue;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import static com.squareup.protoparser.Utils.appendDocumentation;
import static com.squareup.protoparser.Utils.appendIndented;
import static com.squareup.protoparser.Utils.checkNotNull;
import static com.squareup.protoparser.Utils.immutableCopyOf;
@AutoValue
public abstract class OneOfElement {
  /** Returns a new builder with no properties set. */
  public static Builder builder() {
    return new Builder();
  }

  OneOfElement() {
  }

  public abstract String name();
  public abstract String documentation();
  public abstract List<FieldElement> fields();

  /** Renders this oneof group back to proto schema syntax. */
  public final String toSchema() {
    StringBuilder sb = new StringBuilder();
    appendDocumentation(sb, documentation());
    sb.append("oneof ").append(name()).append(" {");
    List<FieldElement> memberFields = fields();
    if (!memberFields.isEmpty()) {
      sb.append('\n');
      for (FieldElement member : memberFields) {
        appendIndented(sb, member.toSchema());
      }
    }
    return sb.append("}\n").toString();
  }

  /** Builder for {@link OneOfElement}; setters reject null arguments. */
  public static final class Builder {
    private String name;
    private String documentation = "";
    private final List<FieldElement> fields = new ArrayList<>();

    private Builder() {
    }

    public Builder name(String name) {
      this.name = checkNotNull(name, "name");
      return this;
    }

    public Builder documentation(String documentation) {
      this.documentation = checkNotNull(documentation, "documentation");
      return this;
    }

    public Builder addField(FieldElement field) {
      fields.add(checkNotNull(field, "field"));
      return this;
    }

    public Builder addFields(Collection<FieldElement> fields) {
      for (FieldElement field : checkNotNull(fields, "fields")) {
        addField(field);
      }
      return this;
    }

    public OneOfElement build() {
      checkNotNull(name, "name");
      // TODO check non-empty?
      return new AutoValue_OneOfElement(name, documentation, immutableCopyOf(fields));
    }
  }
}
| 2,870 |
0 | Create_ds/protoparser/src/main/java/com/squareup | Create_ds/protoparser/src/main/java/com/squareup/protoparser/ProtoParser.java | // Copyright 2013 Square, Inc.
package com.squareup.protoparser;
import com.google.auto.value.AutoValue;
import com.squareup.protoparser.DataType.MapType;
import com.squareup.protoparser.DataType.NamedType;
import com.squareup.protoparser.DataType.ScalarType;
import java.io.CharArrayWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import static com.squareup.protoparser.ProtoFile.Syntax.PROTO_2;
import static com.squareup.protoparser.ProtoFile.Syntax.PROTO_3;
import static java.nio.charset.StandardCharsets.UTF_8;
/** Basic parser for {@code .proto} schema declarations. */
public final class ProtoParser {
/** Parse a {@code .proto} definition file; the stream is closed via try-with-resources. */
public static ProtoFile parseUtf8(File file) throws IOException {
  try (InputStream is = new FileInputStream(file)) {
    // The file's path doubles as the parsed file's name.
    return parseUtf8(file.getPath(), is);
  }
}
/** Parse a {@code .proto} definition file, decoding it as UTF-8. */
public static ProtoFile parseUtf8(Path path) throws IOException {
  try (Reader reader = Files.newBufferedReader(path, UTF_8)) {
    return parse(path.toString(), reader);
  }
}
/** Parse a named {@code .proto} schema, decoding as UTF-8. The {@code InputStream} is not closed. */
public static ProtoFile parseUtf8(String name, InputStream is) throws IOException {
  return parse(name, new InputStreamReader(is, UTF_8));
}
/** Parse a named {@code .proto} schema. The {@code Reader} is not closed. */
public static ProtoFile parse(String name, Reader reader) throws IOException {
  // Drain the reader fully into memory; the parser operates on a char array.
  CharArrayWriter sink = new CharArrayWriter();
  char[] chunk = new char[1024];
  for (int read = reader.read(chunk); read != -1; read = reader.read(chunk)) {
    sink.write(chunk, 0, read);
  }
  return new ProtoParser(name, sink.toCharArray()).readProtoFile();
}
/** Parse a named {@code .proto} schema held entirely in {@code data}. */
public static ProtoFile parse(String name, String data) {
  return new ProtoParser(name, data.toCharArray()).readProtoFile();
}
// Name reported for the file being parsed; also seeds the ProtoFile builder.
private final String filePath;
// Entire contents of the schema under parse.
private final char[] data;
// Accumulates file-level parse results.
private final ProtoFile.Builder fileBuilder;

/** Our cursor within the document. {@code data[pos]} is the next character to be read. */
private int pos;

/** The number of newline characters encountered thus far. */
private int line;

/** The index of the most recent newline character. */
private int lineStart;

/** Output package name, or null if none yet encountered. */
private String packageName;

/** The current package name + nested type names, separated by dots. */
private String prefix = "";

/** Creates a parser over {@code data}; results are labelled with {@code filePath}. */
ProtoParser(String filePath, char[] data) {
  this.filePath = filePath;
  this.data = data;
  this.fileBuilder = ProtoFile.builder(filePath);
}
/**
 * Parses the whole document, collecting top-level declarations into
 * {@link #fileBuilder}, and returns the assembled {@link ProtoFile} at end of input.
 */
ProtoFile readProtoFile() {
  while (true) {
    String documentation = readDocumentation();
    if (pos == data.length) {
      return fileBuilder.build();
    }
    Object declaration = readDeclaration(documentation, Context.FILE);
    if (declaration instanceof TypeElement) {
      fileBuilder.addType((TypeElement) declaration);
    } else if (declaration instanceof ServiceElement) {
      fileBuilder.addService((ServiceElement) declaration);
    } else if (declaration instanceof OptionElement) {
      fileBuilder.addOption((OptionElement) declaration);
    } else if (declaration instanceof ExtendElement) {
      fileBuilder.addExtendDeclaration((ExtendElement) declaration);
    }
    // null results (package/import/syntax, stray ';') are absorbed by readDeclaration.
  }
}
/**
 * Reads a single declaration at the cursor and returns the parsed element, or
 * null for declarations consumed directly into {@link #fileBuilder} (package,
 * import, syntax) and for stray semicolons. {@code context} gates which keywords
 * are legal at the current nesting level.
 */
private Object readDeclaration(String documentation, Context context) {
  // Skip unnecessary semicolons, occasionally used after a nested message declaration.
  if (peekChar() == ';') {
    pos++;
    return null;
  }

  String label = readWord();

  if (label.equals("package")) {
    if (!context.permitsPackage()) throw unexpected("'package' in " + context);
    if (packageName != null) throw unexpected("too many package names");
    packageName = readName();
    fileBuilder.packageName(packageName);
    // All subsequently declared types are qualified under this package.
    prefix = packageName + ".";
    if (readChar() != ';') throw unexpected("expected ';'");
    return null;
  } else if (label.equals("import")) {
    if (!context.permitsImport()) throw unexpected("'import' in " + context);
    String importString = readString();
    // "import public" declarations are tracked separately from plain imports.
    if ("public".equals(importString)) {
      fileBuilder.addPublicDependency(readString());
    } else {
      fileBuilder.addDependency(importString);
    }
    if (readChar() != ';') throw unexpected("expected ';'");
    return null;
  } else if (label.equals("syntax")) {
    if (!context.permitsSyntax()) throw unexpected("'syntax' in " + context);
    if (readChar() != '=') throw unexpected("expected '='");
    String syntax = readQuotedString();
    switch (syntax) {
      case "proto2":
        fileBuilder.syntax(PROTO_2);
        break;
      case "proto3":
        fileBuilder.syntax(PROTO_3);
        break;
      default:
        throw unexpected("'syntax' must be 'proto2' or 'proto3'. Found: " + syntax);
    }
    if (readChar() != ';') throw unexpected("expected ';'");
    return null;
  } else if (label.equals("option")) {
    OptionElement result = readOption('=');
    if (readChar() != ';') throw unexpected("expected ';'");
    return result;
  } else if (label.equals("message")) {
    return readMessage(documentation);
  } else if (label.equals("enum")) {
    return readEnumElement(documentation);
  } else if (label.equals("service")) {
    return readService(documentation);
  } else if (label.equals("extend")) {
    return readExtend(documentation);
  } else if (label.equals("rpc")) {
    if (!context.permitsRpc()) throw unexpected("'rpc' in " + context);
    return readRpc(documentation);
  } else if (label.equals("required") || label.equals("optional") || label.equals("repeated")) {
    if (!context.permitsField()) throw unexpected("fields must be nested");
    FieldElement.Label labelEnum = FieldElement.Label.valueOf(label.toUpperCase(Locale.US));
    return readField(documentation, labelEnum);
  } else if (label.equals("oneof")) {
    if (!context.permitsOneOf()) throw unexpected("'oneof' must be nested in message");
    return readOneOf(documentation);
  } else if (label.equals("extensions")) {
    if (!context.permitsExtensions()) throw unexpected("'extensions' must be nested");
    return readExtensions(documentation);
  } else if (context == Context.ENUM) {
    // Inside an enum any bare word is a constant name: NAME = tag [options];
    if (readChar() != '=') throw unexpected("expected '='");

    EnumConstantElement.Builder builder = EnumConstantElement.builder()
        .name(label)
        .tag(readInt());

    if (peekChar() == '[') {
      readChar();
      while (true) {
        builder.addOption(readOption('='));
        char c = readChar();
        if (c == ']') {
          break;
        }
        if (c != ',') {
          // NOTE(review): the message text appears to be missing a closing quote.
          throw unexpected("Expected ',' or ']");
        }
      }
    }

    if (readChar() != ';') throw unexpected("expected ';'");
    documentation = tryAppendTrailingDocumentation(documentation);
    return builder.documentation(documentation).build();
  } else {
    throw unexpected("unexpected label: " + label);
  }
}
/** Reads a message declaration (cursor positioned after the 'message' keyword). */
private MessageElement readMessage(String documentation) {
  String name = readName();
  MessageElement.Builder builder = MessageElement.builder()
      .name(name)
      .qualifiedName(prefix + name)
      .documentation(documentation);

  // Nested types are qualified under this message; the prefix is restored below.
  String previousPrefix = prefix;
  prefix = prefix + name + ".";

  if (readChar() != '{') throw unexpected("expected '{'");
  while (true) {
    String nestedDocumentation = readDocumentation();
    if (peekChar() == '}') {
      pos++;
      break;
    }
    Object declared = readDeclaration(nestedDocumentation, Context.MESSAGE);
    if (declared instanceof FieldElement) {
      builder.addField((FieldElement) declared);
    } else if (declared instanceof OneOfElement) {
      builder.addOneOf((OneOfElement) declared);
    } else if (declared instanceof TypeElement) {
      builder.addType((TypeElement) declared);
    } else if (declared instanceof ExtensionsElement) {
      builder.addExtensions((ExtensionsElement) declared);
    } else if (declared instanceof OptionElement) {
      builder.addOption((OptionElement) declared);
    } else if (declared instanceof ExtendElement) {
      // Extend declarations always add in a global scope regardless of nesting.
      fileBuilder.addExtendDeclaration((ExtendElement) declared);
    }
  }
  prefix = previousPrefix;

  return builder.build();
}
/** Reads an extend declaration (cursor positioned after the 'extend' keyword). */
private ExtendElement readExtend(String documentation) {
  String name = readName();
  // Unqualified names are resolved against the file's package, when one was declared.
  String qualifiedName =
      (!name.contains(".") && packageName != null) ? packageName + "." + name : name;
  ExtendElement.Builder extendBuilder = ExtendElement.builder()
      .name(name)
      .qualifiedName(qualifiedName)
      .documentation(documentation);

  if (readChar() != '{') throw unexpected("expected '{'");
  while (true) {
    String memberDocumentation = readDocumentation();
    if (peekChar() == '}') {
      pos++;
      break;
    }
    // Only fields are collected; any other declaration kind is silently dropped.
    Object declared = readDeclaration(memberDocumentation, Context.EXTEND);
    if (declared instanceof FieldElement) {
      extendBuilder.addField((FieldElement) declared);
    }
  }
  return extendBuilder.build();
}
/** Reads a service declaration and returns it. */
private ServiceElement readService(String documentation) {
  String name = readName();
  ServiceElement.Builder builder = ServiceElement.builder()
      .name(name)
      .qualifiedName(prefix + name)
      .documentation(documentation);

  if (readChar() != '{') throw unexpected("expected '{'");
  while (true) {
    String rpcDocumentation = readDocumentation();
    if (peekChar() == '}') {
      pos++;
      break;
    }
    // Services may contain only rpcs and options; anything else is dropped.
    Object declared = readDeclaration(rpcDocumentation, Context.SERVICE);
    if (declared instanceof RpcElement) {
      builder.addRpc((RpcElement) declared);
    } else if (declared instanceof OptionElement) {
      builder.addOption((OptionElement) declared);
    }
  }
  return builder.build();
}
/** Reads an enumerated type declaration and returns it. */
private EnumElement readEnumElement(String documentation) {
  String enumName = readName();
  EnumElement.Builder enumBuilder = EnumElement.builder()
      .name(enumName)
      .qualifiedName(prefix + enumName)
      .documentation(documentation);

  if (readChar() != '{') throw unexpected("expected '{'");
  while (true) {
    // Documentation must be consumed before peeking, since comments precede '}'.
    String constantDocumentation = readDocumentation();
    if (peekChar() == '}') {
      pos++;
      break;
    }
    Object declared = readDeclaration(constantDocumentation, Context.ENUM);
    if (declared instanceof EnumConstantElement) {
      enumBuilder.addConstant((EnumConstantElement) declared);
    } else if (declared instanceof OptionElement) {
      enumBuilder.addOption((OptionElement) declared);
    }
  }
  return enumBuilder.build();
}
/** Reads a field declaration (cursor positioned after its label) and returns it. */
private FieldElement readField(String documentation, FieldElement.Label label) {
  DataType type = readDataType();
  String name = readName();
  if (readChar() != '=') throw unexpected("expected '='");
  int tag = readInt();

  FieldElement.Builder builder = FieldElement.builder()
      .label(label)
      .type(type)
      .name(name)
      .tag(tag);

  if (peekChar() == '[') {
    pos++;
    while (true) {
      builder.addOption(readOption('='));

      // Check for optional ',' or closing ']'
      // NOTE(review): any other character falls through and the loop re-enters
      // readOption, i.e. a missing comma between options is tolerated here —
      // unlike the enum-constant option loop, which throws.
      char c = peekChar();
      if (c == ']') {
        pos++;
        break;
      } else if (c == ',') {
        pos++;
      }
    }
  }
  if (readChar() != ';') {
    throw unexpected("expected ';'");
  }
  // Presumably appends a same-line trailing comment to the documentation — helper
  // is defined outside this view.
  documentation = tryAppendTrailingDocumentation(documentation);
  return builder.documentation(documentation).build();
}
/** Reads a oneof group declaration and returns it. */
private OneOfElement readOneOf(String documentation) {
  OneOfElement.Builder oneOfBuilder = OneOfElement.builder()
      .name(readName())
      .documentation(documentation);

  if (readChar() != '{') throw unexpected("expected '{'");
  while (true) {
    String memberDocumentation = readDocumentation();
    if (peekChar() == '}') {
      pos++;
      break;
    }
    // Every member of a oneof is parsed as a field carrying the ONE_OF label.
    oneOfBuilder.addField(readField(memberDocumentation, FieldElement.Label.ONE_OF));
  }
  return oneOfBuilder.build();
}
/** Reads extensions like "extensions 101;" or "extensions 101 to max;". */
private ExtensionsElement readExtensions(String documentation) {
  int start = readInt(); // Range start.
  int end = start; // Single-value ranges end where they begin.
  if (peekChar() != ';') {
    if (!"to".equals(readWord())) throw unexpected("expected ';' or 'to'");
    String endWord = readWord(); // Range end: a number or the keyword "max".
    end = "max".equals(endWord) ? ProtoFile.MAX_TAG_VALUE : Integer.parseInt(endWord);
  }
  if (readChar() != ';') throw unexpected("expected ';'");
  return ExtensionsElement.create(start, end, documentation);
}
/** Reads an option containing a name, an '=' or ':', and a value. */
private OptionElement readOption(char keyValueSeparator) {
  // '[' and '(' distinguish extension and parenthesized (custom) option names.
  boolean isExtension = (peekChar() == '[');
  boolean isParenthesized = (peekChar() == '(');
  String name = readName(); // Option name.
  if (isExtension) {
    name = "[" + name + "]";
  }
  String subName = null;
  char c = readChar();
  if (c == '.') {
    // Read nested field name. For example "baz" in "(foo.bar).baz = 12".
    subName = readName();
    c = readChar();
  }
  if (c != keyValueSeparator) {
    throw unexpected("expected '" + keyValueSeparator + "' in option");
  }
  OptionKindAndValue kindAndValue = readKindAndValue();
  OptionElement.Kind kind = kindAndValue.kind();
  Object value = kindAndValue.value();
  if (subName != null) {
    // "(foo).bar = x" becomes option foo whose value is the nested option bar = x.
    value = OptionElement.create(subName, kind, value);
    kind = OptionElement.Kind.OPTION;
  }
  return OptionElement.create(name, kind, value, isParenthesized);
}
/** Pair of an option's {@link OptionElement.Kind} and its freshly parsed value. */
@AutoValue
abstract static class OptionKindAndValue {
  static OptionKindAndValue of(OptionElement.Kind kind, Object value) {
    return new AutoValue_ProtoParser_OptionKindAndValue(kind, value);
  }

  abstract OptionElement.Kind kind();
  abstract Object value();
}
/** Reads a value that can be a map, list, string, number, boolean or enum. */
private OptionKindAndValue readKindAndValue() {
  char next = peekChar();
  if (next == '{') {
    return OptionKindAndValue.of(OptionElement.Kind.MAP, readMap('{', '}', ':'));
  }
  if (next == '[') {
    return OptionKindAndValue.of(OptionElement.Kind.LIST, readList());
  }
  if (next == '"') {
    return OptionKindAndValue.of(OptionElement.Kind.STRING, readString());
  }
  if (Character.isDigit(next) || next == '-') {
    return OptionKindAndValue.of(OptionElement.Kind.NUMBER, readWord());
  }
  // Bare words: "true"/"false" are booleans, anything else is an enum reference.
  String word = readWord();
  if ("true".equals(word) || "false".equals(word)) {
    return OptionKindAndValue.of(OptionElement.Kind.BOOLEAN, word);
  }
  return OptionKindAndValue.of(OptionElement.Kind.ENUM, word);
}
/**
 * Returns a map of string keys and values. This is similar to a JSON object,
 * with '{' and '}' surrounding the map, ':' separating keys from values, and
 * ',' separating entries. Repeated keys are collected: scalar values are
 * promoted to lists, and option-valued entries merge into a nested map.
 */
@SuppressWarnings("unchecked")
private Map<String, Object> readMap(char openBrace, char closeBrace, char keyValueSeparator) {
  if (readChar() != openBrace) throw new AssertionError();
  Map<String, Object> result = new LinkedHashMap<>();
  while (true) {
    if (peekChar() == closeBrace) {
      // If we see the close brace, finish immediately. This handles {}/[] and ,}/,] cases.
      pos++;
      return result;
    }

    OptionElement option = readOption(keyValueSeparator);
    String name = option.name();
    Object value = option.value();
    if (value instanceof OptionElement) {
      // "a.b: v" style entry: merge into (or create) the nested map under 'name'.
      @SuppressWarnings("unchecked")
      Map<String, Object> nested = (Map<String, Object>) result.get(name);
      if (nested == null) {
        nested = new LinkedHashMap<>();
        result.put(name, nested);
      }
      OptionElement valueOption = (OptionElement) value;
      nested.put(valueOption.name(), valueOption.value());
    } else {
      // Add the value(s) to any previous values with the same key
      Object previous = result.get(name);
      if (previous == null) {
        result.put(name, value);
      } else if (previous instanceof List) {
        // Add to previous List
        addToList((List<Object>) previous, value);
      } else {
        // Second occurrence of a scalar key: promote to a list of both values.
        List<Object> newList = new ArrayList<>();
        newList.add(previous);
        addToList(newList, value);
        result.put(name, newList);
      }
    }

    // ',' separator is optional, skip if present
    if (peekChar() == ',') {
      pos++;
    }
  }
}
/** Adds {@code value} to {@code list}, flattening when the value is itself a list. */
private void addToList(List<Object> list, Object value) {
  if (!(value instanceof List)) {
    list.add(value);
    return;
  }
  // Nested lists are spliced into the target rather than stored as one element.
  list.addAll((List) value);
}
/**
 * Returns a list of values. This is similar to JSON with '[' and ']'
 * surrounding the list and ',' separating values.
 */
private List<Object> readList() {
  if (readChar() != '[') throw new AssertionError();
  List<Object> values = new ArrayList<>();
  while (peekChar() != ']') {
    values.add(readKindAndValue().value());
    // A ',' between values is consumed; anything other than ',' or ']' is an error.
    char separator = peekChar();
    if (separator == ',') {
      pos++;
    } else if (separator != ']') {
      throw unexpected("expected ',' or ']'");
    }
  }
  pos++; // Consume the closing ']'.
  return values;
}
/** Reads an rpc and returns it. */
private RpcElement readRpc(String documentation) {
  RpcElement.Builder builder = RpcElement.builder()
      .name(readName())
      .documentation(documentation);

  if (readChar() != '(') throw unexpected("expected '('");
  DataType requestType = readDataType();
  // Request/response types must be named (message) types — never scalars or maps.
  if (!(requestType instanceof NamedType)) {
    throw unexpected("expected message but was " + requestType);
  }
  builder.requestType((NamedType) requestType);
  if (readChar() != ')') throw unexpected("expected ')'");

  if (!readWord().equals("returns")) throw unexpected("expected 'returns'");

  if (readChar() != '(') throw unexpected("expected '('");
  DataType responseType = readDataType();
  if (!(responseType instanceof NamedType)) {
    throw unexpected("expected message but was " + responseType);
  }
  builder.responseType((NamedType) responseType);
  if (readChar() != ')') throw unexpected("expected ')'");

  // Body is optional: either a '{ ... }' block of options, or a bare ';'.
  if (peekChar() == '{') {
    pos++;
    while (true) {
      String rpcDocumentation = readDocumentation();
      if (peekChar() == '}') {
        pos++;
        break;
      }
      Object declared = readDeclaration(rpcDocumentation, Context.RPC);
      if (declared instanceof OptionElement) {
        builder.addOption((OptionElement) declared);
      }
    }
  } else if (readChar() != ';') throw unexpected("expected ';'");
  return builder.build();
}
/** Reads (consumes) the next non-whitespace character and returns it. */
private char readChar() {
  char c = peekChar();
  pos++;
  return c;
}
/**
 * Peeks a non-whitespace character and returns it. The only difference
 * between this and {@code readChar} is that this doesn't consume the char.
 * Note that whitespace (and comments) before the char ARE consumed.
 *
 * @throws RuntimeException if the end of input is reached first
 */
private char peekChar() {
  skipWhitespace(true);
  if (pos == data.length) throw unexpected("unexpected end of file");
  return data[pos];
}
/** Reads a quoted or unquoted string and returns it. */
private String readString() {
  skipWhitespace(true);
  if (peekChar() == '"') {
    return readQuotedString();
  }
  return readWord();
}
/** Reads a double-quoted string, translating C-style escape sequences. */
private String readQuotedString() {
  if (readChar() != '"') throw new AssertionError();
  StringBuilder result = new StringBuilder();
  while (pos < data.length) {
    char c = data[pos++];
    if (c == '"') return result.toString();

    if (c == '\\') {
      if (pos == data.length) throw unexpected("unexpected end of file");
      c = data[pos++];
      switch (c) {
        case 'a': c = 0x7; break; // bell
        case 'b': c = '\b'; break;
        case 'f': c = '\f'; break;
        case 'n': c = '\n'; break;
        case 'r': c = '\r'; break;
        case 't': c = '\t'; break;
        case 'v': c = 0xb; break; // vertical tab
        case 'x':case 'X':
          c = readNumericEscape(16, 2); // \xNN hex escape
          break;
        case '0':case '1':case '2':case '3':case '4':case '5':case '6':case '7':
          --pos; // the digit itself is the first character of the octal value
          c = readNumericEscape(8, 3); // \NNN octal escape
          break;
        default:
          // use char as-is
          break;
      }
    }

    result.append(c);
    if (c == '\n') newline(); // keep line/column tracking accurate inside strings
  }
  throw unexpected("unterminated string");
}
/**
 * Reads up to {@code len} digits in the given {@code radix} and returns the decoded
 * character. Stops early at the first non-digit; throws if no digit was read at all.
 */
private char readNumericEscape(int radix, int len) {
  int endPos = Math.min(pos + len, data.length);
  int value = -1; // -1 means "no digit consumed yet"
  while (pos < endPos) {
    int digit = hexDigit(data[pos]);
    if (digit < 0 || digit >= radix) break;
    value = (value < 0) ? digit : value * radix + digit;
    pos++;
  }
  if (value < 0) throw unexpected("expected a digit after \\x or \\X");
  return (char) value;
}
/** Returns the numeric value of the hex digit {@code c}, or -1 if it is not one. */
private int hexDigit(char c) {
  if ('0' <= c && c <= '9') return c - '0';
  if ('a' <= c && c <= 'f') return c - 'a' + 10;
  if ('A' <= c && c <= 'F') return c - 'A' + 10;
  return -1;
}
/** Reads a (paren-wrapped), [square-wrapped] or naked symbol name. */
private String readName() {
  char first = peekChar();
  if (first == '(') {
    pos++;
    String name = readWord();
    if (readChar() != ')') throw unexpected("expected ')'");
    return name;
  }
  if (first == '[') {
    pos++;
    String name = readWord();
    if (readChar() != ']') throw unexpected("expected ']'");
    return name;
  }
  return readWord();
}
/**
 * Reads a scalar, map, or type name. Any word that is not a recognized scalar or
 * {@code map<K, V>} is treated as a (possibly not-yet-resolved) named type.
 */
private DataType readDataType() {
  String name = readWord();
  switch (name) {
    case "map":
      // map<KeyType, ValueType>
      if (readChar() != '<') throw unexpected("expected '<'");
      DataType keyType = readDataType();
      if (readChar() != ',') throw unexpected("expected ','");
      DataType valueType = readDataType();
      if (readChar() != '>') throw unexpected("expected '>'");
      return MapType.create(keyType, valueType);
    case "any":
      return ScalarType.ANY;
    case "bool":
      return ScalarType.BOOL;
    case "bytes":
      return ScalarType.BYTES;
    case "double":
      return ScalarType.DOUBLE;
    case "float":
      return ScalarType.FLOAT;
    case "fixed32":
      return ScalarType.FIXED32;
    case "fixed64":
      return ScalarType.FIXED64;
    case "int32":
      return ScalarType.INT32;
    case "int64":
      return ScalarType.INT64;
    case "sfixed32":
      return ScalarType.SFIXED32;
    case "sfixed64":
      return ScalarType.SFIXED64;
    case "sint32":
      return ScalarType.SINT32;
    case "sint64":
      return ScalarType.SINT64;
    case "string":
      return ScalarType.STRING;
    case "uint32":
      return ScalarType.UINT32;
    case "uint64":
      return ScalarType.UINT64;
    default:
      // Not a built-in scalar: a message/enum reference, resolved later.
      return NamedType.create(name);
  }
}
/** Reads a non-empty word and returns it. Word characters: [a-zA-Z0-9_.-]. */
private String readWord() {
  skipWhitespace(true);
  int start = pos;
  while (pos < data.length) {
    char c = data[pos];
    boolean wordChar = (c >= 'a' && c <= 'z')
        || (c >= 'A' && c <= 'Z')
        || (c >= '0' && c <= '9')
        || c == '_'
        || c == '-'
        || c == '.';
    if (!wordChar) break;
    pos++;
  }
  if (pos == start) throw unexpected("expected a word");
  return new String(data, start, pos - start);
}
/**
 * Reads an integer and returns it. Accepts decimal and "0x"/"0X"-prefixed
 * hexadecimal literals (a leading '-' is permitted by {@code readWord}).
 *
 * @throws RuntimeException if the next word is not a parseable integer
 */
private int readInt() {
  String tag = readWord();
  try {
    int radix = 10;
    String digits = tag;
    if (digits.startsWith("0x") || digits.startsWith("0X")) {
      digits = digits.substring("0x".length());
      radix = 16;
    }
    return Integer.valueOf(digits, radix);
  } catch (Exception e) {
    // Report the word exactly as written (including any "0x" prefix); the previous
    // code mutated `tag` before parsing, so malformed hex showed a stripped token.
    throw unexpected("expected an integer but was " + tag);
  }
}
/**
 * Like {@link #skipWhitespace}, but this returns a string containing all
 * comment text. By convention, comments before a declaration document that
 * declaration.
 */
private String readDocumentation() {
  String result = null;
  for (;;) {
    skipWhitespace(false);
    boolean atComment = pos < data.length && data[pos] == '/';
    if (!atComment) {
      return result == null ? "" : result;
    }
    // Consecutive comments are joined with a newline.
    String comment = readComment();
    result = (result == null) ? comment : result + "\n" + comment;
  }
}
/** Reads a comment and returns its body. The char at {@code pos} must be '/'. */
private String readComment() {
  if (pos == data.length || data[pos] != '/') throw new AssertionError();
  pos++;
  // The second character decides the style: '*' block comment, '/' line comment.
  int commentType = pos < data.length ? data[pos++] : -1;
  if (commentType == '*') {
    StringBuilder result = new StringBuilder();
    boolean startOfLine = true;

    // Scan until the closing "*/"; the loop bound keeps the pos + 1 lookahead in range.
    for (; pos + 1 < data.length; pos++) {
      char c = data[pos];
      if (c == '*' && data[pos + 1] == '/') {
        pos += 2;
        return result.toString().trim();
      }
      if (c == '\n') {
        result.append('\n');
        newline();
        startOfLine = true;
      } else if (!startOfLine) {
        result.append(c);
      } else if (c == '*') {
        // Leading '*' of a Javadoc-style continuation line: drop it.
        if (data[pos + 1] == ' ') {
          pos += 1; // Skip a single leading space, if present.
        }
        startOfLine = false;
      } else if (!Character.isWhitespace(c)) {
        result.append(c);
        startOfLine = false;
      }
    }
    throw unexpected("unterminated comment");
  } else if (commentType == '/') {
    if (pos < data.length && data[pos] == ' ') {
      pos += 1; // Skip a single leading space, if present.
    }
    int start = pos;
    while (pos < data.length) {
      char c = data[pos++];
      if (c == '\n') {
        newline();
        break;
      }
    }
    // pos - 1 excludes the newline just consumed.
    // NOTE(review): when the input ends without a trailing newline, this length also
    // drops the comment's final character — confirm whether that is intended.
    return new String(data, start, pos - 1 - start);
  } else {
    throw unexpected("unexpected '/'");
  }
}
/**
 * Attempts to read a trailing comment on the current line and, if found, returns
 * {@code documentation} with the comment appended (joined with '\n'). If nothing but
 * spaces/tabs follows on the line, the original documentation is returned unchanged.
 * A star comment used this way must open and close on the same line, and nothing but
 * whitespace may follow it.
 */
private String tryAppendTrailingDocumentation(String documentation) {
  // Search for a '/' character ignoring spaces and tabs.
  while (pos < data.length) {
    char c = data[pos];
    if (c == ' ' || c == '\t') {
      pos++;
    } else if (c == '/') {
      pos++;
      break;
    } else {
      // Not a whitespace or comment-starting character. Return original documentation.
      return documentation;
    }
  }

  // The first '/' was consumed above; a second '/' or a '*' must follow.
  if (pos == data.length || (data[pos] != '/' && data[pos] != '*')) {
    pos--; // Backtrack to start of comment.
    throw unexpected("expected '//' or '/*'");
  }

  boolean isStar = data[pos] == '*';
  pos++;

  if (pos < data.length && data[pos] == ' ') {
    pos++; // Skip a single leading space, if present.
  }

  int start = pos; // first character of the comment body
  int end;         // last character of the comment body (inclusive)

  if (isStar) {
    // Consume star comment until it closes on the same line.
    while (true) {
      if (pos == data.length || data[pos] == '\n') {
        throw unexpected("trailing comment must be closed on the same line");
      }
      if (data[pos] == '*' && pos + 1 < data.length && data[pos + 1] == '/') {
        end = pos - 1; // The character before '*'.
        pos += 2; // Skip to the character after '/'.
        break;
      }
      pos++;
    }
    // Ensure nothing follows a trailing star comment.
    while (pos < data.length) {
      char c = data[pos++];
      if (c == '\n') {
        newline();
        break;
      }
      if (c != ' ' && c != '\t') {
        throw unexpected("no syntax may follow trailing comment");
      }
    }
  } else {
    // Consume comment until newline.
    while (true) {
      if (pos == data.length) {
        end = pos - 1;
        break;
      }
      char c = data[pos++];
      if (c == '\n') {
        newline();
        end = pos - 2; // Account for stepping past the newline.
        break;
      }
    }
  }

  // Remove trailing whitespace.
  while (end > start && (data[end] == ' ' || data[end] == '\t')) {
    end--;
  }
  // NOTE(review): a one-character trailing comment yields end == start and is dropped
  // here along with the genuinely-empty case — confirm whether that is intended.
  if (end == start) {
    return documentation;
  }
  String trailingDocumentation = new String(data, start, end - start + 1);
  if (documentation.isEmpty()) {
    return trailingDocumentation;
  }
  return documentation + '\n' + trailingDocumentation;
}
/**
 * Skips whitespace characters and optionally comments. When this returns,
 * either {@code pos == data.length} or a non-whitespace character.
 */
private void skipWhitespace(boolean skipComments) {
  while (pos < data.length) {
    char c = data[pos];
    switch (c) {
      case '\n':
        pos++;
        newline(); // track line numbers as we pass each newline
        break;
      case ' ':
      case '\t':
      case '\r':
        pos++;
        break;
      case '/':
        if (!skipComments) return;
        readComment(); // advances pos past the whole comment
        break;
      default:
        return;
    }
  }
}
/** Call this every time a '\n' is encountered (after {@code pos} has advanced past it). */
private void newline() {
  line++;
  lineStart = pos; // pos is already past the '\n', so the next char is column 1
}
/** Returns the 1-based column of the current position, for error messages. */
private int column() {
  return pos - lineStart + 1;
}
/** Returns the 1-based line number of the current position, for error messages. */
private int line() {
  return line + 1;
}
/**
 * Throws an {@link IllegalStateException} describing a syntax error at the current
 * position. Declared to return an exception so call sites can write
 * {@code throw unexpected(...)} and satisfy the compiler, though it never returns.
 */
private RuntimeException unexpected(String message) {
  String detail =
      String.format("Syntax error in %s at %d:%d: %s", filePath, line(), column(), message);
  throw new IllegalStateException(detail);
}
/** The kind of declaration that encloses the current parse position. */
enum Context {
  FILE,
  MESSAGE,
  ENUM,
  RPC,
  EXTEND,
  SERVICE;

  /** True when a 'package' declaration is allowed (file level only). */
  public boolean permitsPackage() {
    return this == FILE;
  }

  /** True when a 'syntax' declaration is allowed (file level only). */
  public boolean permitsSyntax() {
    return this == FILE;
  }

  /** True when an 'import' declaration is allowed (file level only). */
  public boolean permitsImport() {
    return this == FILE;
  }

  /** True when field declarations are allowed (inside message or extend bodies). */
  public boolean permitsField() {
    return this == MESSAGE || this == EXTEND;
  }

  /** True when an 'extensions' declaration is allowed (anywhere but file level). */
  public boolean permitsExtensions() {
    return this != FILE;
  }

  /** True when an 'rpc' declaration is allowed (inside a service body). */
  public boolean permitsRpc() {
    return this == SERVICE;
  }

  /** True when a 'oneof' declaration is allowed (inside a message body). */
  public boolean permitsOneOf() {
    return this == MESSAGE;
  }
}
}
| 2,871 |
0 | Create_ds/protoparser/src/main/java/com/squareup | Create_ds/protoparser/src/main/java/com/squareup/protoparser/EnumConstantElement.java | // Copyright 2014 Square, Inc.
package com.squareup.protoparser;
import com.google.auto.value.AutoValue;
import java.util.ArrayList;
import java.util.List;
import static com.squareup.protoparser.OptionElement.formatOptionList;
import static com.squareup.protoparser.Utils.appendDocumentation;
import static com.squareup.protoparser.Utils.checkNotNull;
import static com.squareup.protoparser.Utils.immutableCopyOf;
/** An enum constant. */
@AutoValue
public abstract class EnumConstantElement {
  public static Builder builder() {
    return new Builder();
  }

  EnumConstantElement() {
  }

  /** Constant name as declared in the enum. */
  public abstract String name();

  /** Numeric tag assigned to this constant. */
  public abstract int tag();

  /** Leading documentation, or the empty string if none. */
  public abstract String documentation();

  /** Options declared inline on the constant, e.g. {@code [deprecated = true]}. */
  public abstract List<OptionElement> options();

  /** Renders this constant back to .proto schema syntax, ending with ";\n". */
  public final String toSchema() {
    StringBuilder builder = new StringBuilder();
    appendDocumentation(builder, documentation());
    builder.append(name())
        .append(" = ")
        .append(tag());
    if (!options().isEmpty()) {
      builder.append(" [\n");
      formatOptionList(builder, options());
      builder.append(']');
    }
    return builder.append(";\n").toString();
  }

  public static final class Builder {
    private String name;
    private Integer tag; // boxed so build() can detect that it was never set
    private String documentation = "";
    private final List<OptionElement> options = new ArrayList<>();

    private Builder() {
    }

    public Builder name(String name) {
      this.name = checkNotNull(name, "name");
      return this;
    }

    public Builder tag(int tag) {
      this.tag = tag;
      return this;
    }

    public Builder documentation(String documentation) {
      this.documentation = checkNotNull(documentation, "documentation");
      return this;
    }

    public Builder addOption(OptionElement option) {
      options.add(checkNotNull(option, "option"));
      return this;
    }

    public EnumConstantElement build() {
      checkNotNull(name, "name");
      checkNotNull(tag, "tag");
      // Argument order must match the abstract property declaration order above;
      // AutoValue generates the constructor from it.
      return new AutoValue_EnumConstantElement(name, tag, documentation, immutableCopyOf(options));
    }
  }
}
| 2,872 |
0 | Create_ds/protoparser/src/main/java/com/squareup | Create_ds/protoparser/src/main/java/com/squareup/protoparser/ProtoFile.java | // Copyright 2013 Square, Inc.
package com.squareup.protoparser;
import com.google.auto.value.AutoValue;
import com.squareup.protoparser.Utils.Nullable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import static com.squareup.protoparser.Utils.checkNotNull;
import static com.squareup.protoparser.Utils.immutableCopyOf;
/** A single {@code .proto} file. */
@AutoValue
public abstract class ProtoFile {
  static final int MIN_TAG_VALUE = 1;
  static final int MAX_TAG_VALUE = (1 << 29) - 1; // 536,870,911

  // Tag numbers 19000-19999 are reserved by the protobuf wire format.
  private static final int RESERVED_TAG_VALUE_START = 19000;
  private static final int RESERVED_TAG_VALUE_END = 19999;

  /** Syntax version. */
  public enum Syntax {
    PROTO_2("proto2"),
    PROTO_3("proto3");

    private final String name;

    Syntax(String name) {
      this.name = name;
    }
  }

  /** True if the supplied value is in the valid tag range and not reserved. */
  static boolean isValidTag(int value) {
    return (value >= MIN_TAG_VALUE && value < RESERVED_TAG_VALUE_START)
        || (value > RESERVED_TAG_VALUE_END && value <= MAX_TAG_VALUE);
  }

  public static Builder builder(String filePath) {
    return new Builder(checkNotNull(filePath, "filePath"));
  }

  ProtoFile() {
  }

  public abstract String filePath();
  @Nullable public abstract String packageName();
  @Nullable public abstract Syntax syntax();
  public abstract List<String> dependencies();
  public abstract List<String> publicDependencies();
  public abstract List<TypeElement> typeElements();
  public abstract List<ServiceElement> services();
  public abstract List<ExtendElement> extendDeclarations();
  public abstract List<OptionElement> options();

  /** Renders this file back to .proto schema syntax. */
  public final String toSchema() {
    StringBuilder builder = new StringBuilder();
    if (!filePath().isEmpty()) {
      builder.append("// ").append(filePath()).append('\n');
    }
    if (packageName() != null) {
      builder.append("package ").append(packageName()).append(";\n");
    }
    if (syntax() != null) {
      // NOTE(review): this emits 'syntax "proto2";' — the proto language spec writes
      // 'syntax = "proto2";' (with '='). Confirm this output round-trips through the parser.
      builder.append("syntax \"").append(syntax().name).append("\";\n");
    }
    if (!dependencies().isEmpty() || !publicDependencies().isEmpty()) {
      builder.append('\n');
      for (String dependency : dependencies()) {
        builder.append("import \"").append(dependency).append("\";\n");
      }
      for (String publicDependency : publicDependencies()) {
        builder.append("import public \"").append(publicDependency).append("\";\n");
      }
    }
    if (!options().isEmpty()) {
      builder.append('\n');
      for (OptionElement option : options()) {
        builder.append(option.toSchemaDeclaration());
      }
    }
    if (!typeElements().isEmpty()) {
      builder.append('\n');
      for (TypeElement typeElement : typeElements()) {
        builder.append(typeElement.toSchema());
      }
    }
    if (!extendDeclarations().isEmpty()) {
      builder.append('\n');
      for (ExtendElement extendDeclaration : extendDeclarations()) {
        builder.append(extendDeclaration.toSchema());
      }
    }
    if (!services().isEmpty()) {
      builder.append('\n');
      for (ServiceElement service : services()) {
        builder.append(service.toSchema());
      }
    }
    return builder.toString();
  }

  public static final class Builder {
    private final String filePath;
    private String packageName;
    private Syntax syntax;
    private final List<String> dependencies = new ArrayList<>();
    private final List<String> publicDependencies = new ArrayList<>();
    private final List<TypeElement> types = new ArrayList<>();
    private final List<ServiceElement> services = new ArrayList<>();
    private final List<ExtendElement> extendDeclarations = new ArrayList<>();
    private final List<OptionElement> options = new ArrayList<>();

    Builder(String filePath) {
      this.filePath = filePath;
    }

    public Builder packageName(String packageName) {
      this.packageName = checkNotNull(packageName, "packageName");
      return this;
    }

    public Builder syntax(Syntax syntax) {
      this.syntax = checkNotNull(syntax, "syntax");
      return this;
    }

    public Builder addDependency(String dependency) {
      dependencies.add(checkNotNull(dependency, "dependency"));
      return this;
    }

    public Builder addDependencies(Collection<String> dependencies) {
      for (String dependency : checkNotNull(dependencies, "dependencies")) {
        addDependency(dependency);
      }
      return this;
    }

    public Builder addPublicDependency(String dependency) {
      publicDependencies.add(checkNotNull(dependency, "dependency"));
      return this;
    }

    public Builder addPublicDependencies(Collection<String> dependencies) {
      for (String dependency : checkNotNull(dependencies, "dependencies")) {
        addPublicDependency(dependency);
      }
      return this;
    }

    public Builder addType(TypeElement type) {
      types.add(checkNotNull(type, "type"));
      return this;
    }

    public Builder addTypes(Collection<TypeElement> types) {
      for (TypeElement type : checkNotNull(types, "types")) {
        addType(type);
      }
      return this;
    }

    public Builder addService(ServiceElement service) {
      services.add(checkNotNull(service, "service"));
      return this;
    }

    public Builder addServices(Collection<ServiceElement> services) {
      for (ServiceElement service : checkNotNull(services, "services")) {
        addService(service);
      }
      return this;
    }

    public Builder addExtendDeclaration(ExtendElement extend) {
      extendDeclarations.add(checkNotNull(extend, "extend"));
      return this;
    }

    public Builder addExtendDeclarations(Collection<ExtendElement> extendDeclarations) {
      for (ExtendElement extendDeclaration : checkNotNull(extendDeclarations,
          "extendDeclarations")) {
        addExtendDeclaration(extendDeclaration);
      }
      return this;
    }

    public Builder addOption(OptionElement option) {
      options.add(checkNotNull(option, "option"));
      return this;
    }

    public Builder addOptions(Collection<OptionElement> options) {
      for (OptionElement option : checkNotNull(options, "options")) {
        addOption(option);
      }
      return this;
    }

    public ProtoFile build() {
      // Argument order must match the abstract property declaration order above;
      // AutoValue generates the constructor from it.
      return new AutoValue_ProtoFile(filePath, packageName, syntax, immutableCopyOf(dependencies),
          immutableCopyOf(publicDependencies), immutableCopyOf(types), immutableCopyOf(services),
          immutableCopyOf(extendDeclarations), immutableCopyOf(options));
    }
  }
}
| 2,873 |
0 | Create_ds/protoparser/src/main/java/com/squareup | Create_ds/protoparser/src/main/java/com/squareup/protoparser/ExtensionsElement.java | // Copyright 2013 Square, Inc.
package com.squareup.protoparser;
import com.google.auto.value.AutoValue;
import static com.squareup.protoparser.ProtoFile.isValidTag;
import static com.squareup.protoparser.Utils.appendDocumentation;
import static com.squareup.protoparser.Utils.checkArgument;
/** An {@code extensions start to end;} declaration within a message. */
@AutoValue
public abstract class ExtensionsElement {
  /** Creates a range with no documentation. */
  public static ExtensionsElement create(int start, int end) {
    return create(start, end, "");
  }

  /** Creates a range, validating that both endpoints are legal, non-reserved tags. */
  public static ExtensionsElement create(int start, int end, String documentation) {
    checkArgument(isValidTag(start), "Invalid start value: %s", start);
    checkArgument(isValidTag(end), "Invalid end value: %s", end);
    return new AutoValue_ExtensionsElement(documentation, start, end);
  }

  ExtensionsElement() {
  }

  // Declaration order matters: AutoValue builds the constructor from it.
  public abstract String documentation();
  public abstract int start();
  public abstract int end();

  /** Renders this declaration back to .proto schema syntax, ending with ";\n". */
  public final String toSchema() {
    StringBuilder builder = new StringBuilder();
    appendDocumentation(builder, documentation());
    builder.append("extensions ").append(start());
    if (start() != end()) {
      // "max" stands in for the largest legal tag value.
      String endText = end() < ProtoFile.MAX_TAG_VALUE ? String.valueOf(end()) : "max";
      builder.append(" to ").append(endText);
    }
    builder.append(";\n");
    return builder.toString();
  }
}
| 2,874 |
0 | Create_ds/protoparser/src/main/java/com/squareup | Create_ds/protoparser/src/main/java/com/squareup/protoparser/FieldElement.java | // Copyright 2014 Square, Inc.
package com.squareup.protoparser;
import com.google.auto.value.AutoValue;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Locale;
import static com.squareup.protoparser.ProtoFile.isValidTag;
import static com.squareup.protoparser.Utils.appendDocumentation;
import static com.squareup.protoparser.Utils.appendIndented;
import static com.squareup.protoparser.Utils.checkArgument;
import static com.squareup.protoparser.Utils.checkNotNull;
import static com.squareup.protoparser.Utils.immutableCopyOf;
/** A single field declaration within a message, oneof, or extend block. */
@AutoValue
public abstract class FieldElement {
  public static Builder builder() {
    return new Builder();
  }

  FieldElement() {
  }

  /** The field label; {@link Label#ONE_OF} marks membership in a {@code oneof} block. */
  public abstract Label label();

  /**
   * Returns the type of this field. May be a message type name, an enum type
   * name, or a <a href="https://developers.google.com/protocol-buffers/docs/proto#scalar">
   * scalar value type</a> like {@code int64} or {@code bytes}.
   */
  public abstract DataType type();

  public abstract String name();
  public abstract int tag();
  public abstract String documentation();
  public abstract List<OptionElement> options();

  /** Returns true when the {@code deprecated} option is present and set to true. */
  public final boolean isDeprecated() {
    OptionElement deprecatedOption = OptionElement.findByName(options(), "deprecated");
    return deprecatedOption != null && "true".equals(deprecatedOption.value());
  }

  /** Returns true when the {@code packed} option is present and set to true. */
  public final boolean isPacked() {
    OptionElement packedOption = OptionElement.findByName(options(), "packed");
    return packedOption != null && "true".equals(packedOption.value());
  }

  /** Returns the {@code default} option value or {@code null}. */
  public final OptionElement getDefault() {
    // Previously written as `x != null ? x : null`, a redundant ternary.
    return OptionElement.findByName(options(), "default");
  }

  /** Renders this field back to .proto schema syntax, ending with ";\n". */
  public final String toSchema() {
    StringBuilder builder = new StringBuilder();
    appendDocumentation(builder, documentation());
    if (label() != Label.ONE_OF) {
      // oneof members carry no label keyword in the schema.
      builder.append(label().name().toLowerCase(Locale.US)).append(' ');
    }
    builder.append(type())
        .append(' ')
        .append(name())
        .append(" = ")
        .append(tag());
    if (!options().isEmpty()) {
      builder.append(" [\n");
      for (OptionElement option : options()) {
        appendIndented(builder, option.toSchema());
      }
      builder.append(']');
    }
    return builder.append(";\n").toString();
  }

  public enum Label {
    OPTIONAL, REQUIRED, REPEATED,
    /** Indicates the field is a member of a {@code oneof} block. */
    ONE_OF
  }

  public static final class Builder {
    private Label label;
    private DataType type;
    private String name;
    private Integer tag; // boxed so build() can detect that it was never set
    private String documentation = "";
    private final List<OptionElement> options = new ArrayList<>();

    private Builder() {
    }

    public Builder label(Label label) {
      this.label = checkNotNull(label, "label");
      return this;
    }

    public Builder type(DataType type) {
      this.type = checkNotNull(type, "type");
      return this;
    }

    public Builder name(String name) {
      this.name = checkNotNull(name, "name");
      return this;
    }

    public Builder tag(int tag) {
      this.tag = tag;
      return this;
    }

    public Builder documentation(String documentation) {
      this.documentation = checkNotNull(documentation, "documentation");
      return this;
    }

    public Builder addOption(OptionElement option) {
      options.add(checkNotNull(option, "option"));
      return this;
    }

    public Builder addOptions(Collection<OptionElement> options) {
      for (OptionElement option : checkNotNull(options, "options")) {
        addOption(option);
      }
      return this;
    }

    public FieldElement build() {
      checkNotNull(label, "label");
      checkNotNull(type, "type");
      checkNotNull(name, "name");
      checkNotNull(tag, "tag");
      checkArgument(isValidTag(tag), "Illegal tag value: %s", tag);
      // Argument order must match the abstract property declaration order above.
      return new AutoValue_FieldElement(label, type, name, tag, documentation,
          immutableCopyOf(options));
    }
  }
}
| 2,875 |
0 | Create_ds/protoparser/src/main/java/com/squareup | Create_ds/protoparser/src/main/java/com/squareup/protoparser/ExtendElement.java | // Copyright 2013 Square, Inc.
package com.squareup.protoparser;
import com.google.auto.value.AutoValue;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import static com.squareup.protoparser.MessageElement.validateFieldTagUniqueness;
import static com.squareup.protoparser.Utils.appendDocumentation;
import static com.squareup.protoparser.Utils.appendIndented;
import static com.squareup.protoparser.Utils.checkNotNull;
import static com.squareup.protoparser.Utils.immutableCopyOf;
/** An {@code extend SomeMessage { ... }} declaration. */
@AutoValue
public abstract class ExtendElement {
  public static Builder builder() {
    return new Builder();
  }

  ExtendElement() {
  }

  /** The extended type's name as written in the source. */
  public abstract String name();

  /** Fully-qualified name of the extended type (defaults to {@link #name()} if not set). */
  public abstract String qualifiedName();

  public abstract String documentation();
  public abstract List<FieldElement> fields();

  /** Renders this declaration back to .proto schema syntax, ending with "}\n". */
  public final String toSchema() {
    StringBuilder builder = new StringBuilder();
    appendDocumentation(builder, documentation());
    builder.append("extend ")
        .append(name())
        .append(" {");
    if (!fields().isEmpty()) {
      builder.append('\n');
      for (FieldElement field : fields()) {
        appendIndented(builder, field.toSchema());
      }
    }
    return builder.append("}\n").toString();
  }

  public static final class Builder {
    private String name;
    private String qualifiedName;
    private String documentation = "";
    private final List<FieldElement> fields = new ArrayList<>();

    private Builder() {
    }

    public Builder name(String name) {
      this.name = checkNotNull(name, "name");
      // Setting the name also seeds the qualified name unless one was given explicitly.
      if (qualifiedName == null) {
        qualifiedName = name;
      }
      return this;
    }

    public Builder qualifiedName(String qualifiedName) {
      this.qualifiedName = checkNotNull(qualifiedName, "qualifiedName");
      return this;
    }

    public Builder documentation(String documentation) {
      this.documentation = checkNotNull(documentation, "documentation");
      return this;
    }

    public Builder addField(FieldElement field) {
      fields.add(checkNotNull(field, "field"));
      return this;
    }

    public Builder addFields(Collection<FieldElement> fields) {
      for (FieldElement field : checkNotNull(fields, "fields")) {
        addField(field);
      }
      return this;
    }

    public ExtendElement build() {
      checkNotNull(name, "name");
      checkNotNull(qualifiedName, "qualifiedName");
      // Extensions have no oneof groups, hence the empty list for that check.
      validateFieldTagUniqueness(qualifiedName, fields, Collections.<OneOfElement>emptyList());
      return new AutoValue_ExtendElement(name, qualifiedName, documentation,
          immutableCopyOf(fields));
    }
  }
}
| 2,876 |
0 | Create_ds/flink-connector-opensearch/flink-sql-connector-opensearch/src/test/java/org/apache/flink/connector | Create_ds/flink-connector-opensearch/flink-sql-connector-opensearch/src/test/java/org/apache/flink/connector/opensearch/PackagingITCase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.connector.opensearch;
import org.apache.flink.packaging.PackagingTestUtils;
import org.apache.flink.table.factories.Factory;
import org.apache.flink.test.resources.ResourceTestUtils;
import org.junit.jupiter.api.Test;
import java.nio.file.Path;
import java.util.Arrays;
/**
 * Verifies the packaging of the sql-connector uber jar: it must contain only the
 * expected shaded packages and must expose the table {@link Factory} service entry.
 */
class PackagingITCase {

  @Test
  void testPackaging() throws Exception {
    // Locate the built uber jar on the test classpath/resources.
    final Path jar =
        ResourceTestUtils.getResource(".*/flink-sql-connector-opensearch-[^/]*\\.jar");

    // Any file outside these prefixes indicates an unshaded/leaked dependency.
    PackagingTestUtils.assertJarContainsOnlyFilesMatching(
        jar,
        Arrays.asList(
            "META-INF/",
            "org/apache/flink/connector/base/",
            "org/apache/flink/connector/opensearch/",
            "org/apache/flink/opensearch/",
            "org/apache/flink/streaming/connectors/opensearch/"));
    PackagingTestUtils.assertJarContainsServiceEntry(jar, Factory.class);
  }
}
| 2,877 |
0 | Create_ds/flink-connector-opensearch/flink-connector-opensearch-e2e-tests/src/test/java/org/apache/flink/streaming | Create_ds/flink-connector-opensearch/flink-connector-opensearch-e2e-tests/src/test/java/org/apache/flink/streaming/tests/OpensearchSinkE2ECase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.tests;
import org.apache.flink.connector.opensearch.test.DockerImageVersions;
import org.apache.flink.connector.testframe.container.FlinkContainerTestEnvironment;
import org.apache.flink.connector.testframe.external.DefaultContainerizedExternalSystem;
import org.apache.flink.connector.testframe.external.ExternalSystemDataReader;
import org.apache.flink.connector.testframe.junit.annotations.TestContext;
import org.apache.flink.connector.testframe.junit.annotations.TestEnv;
import org.apache.flink.connector.testframe.junit.annotations.TestExternalSystem;
import org.apache.flink.connector.testframe.junit.annotations.TestSemantics;
import org.apache.flink.connector.testframe.testsuites.SinkTestSuiteBase;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.test.resources.ResourceTestUtils;
import org.opensearch.testcontainers.OpensearchContainer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testcontainers.utility.DockerImageName;
import java.time.Duration;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static org.apache.flink.connector.testframe.utils.CollectIteratorAssertions.assertThat;
import static org.apache.flink.runtime.testutils.CommonTestUtils.waitUntilCondition;
/** End to end test for OpensearchSink based on connector testing framework. */
@SuppressWarnings("unused")
public class OpensearchSinkE2ECase extends SinkTestSuiteBase<ComparableTuple2<Integer, String>> {
  private static final Logger LOG = LoggerFactory.getLogger(OpensearchSinkE2ECase.class);

  private static final int READER_RETRY_ATTEMPTS = 10;
  // NOTE(review): despite the old "Not used" note, this value IS passed to
  // reader.poll(Duration.ofMillis(READER_TIMEOUT)) below as a negative timeout —
  // confirm the reader treats negative as "no timeout".
  private static final int READER_TIMEOUT = -1; // Not used

  @TestSemantics
  CheckpointingMode[] semantics = new CheckpointingMode[] {CheckpointingMode.EXACTLY_ONCE};

  // Flink mini-cluster: 1 JobManager, 6 task slots.
  @TestEnv FlinkContainerTestEnvironment flink = new FlinkContainerTestEnvironment(1, 6);

  public OpensearchSinkE2ECase() throws Exception {}

  // OpenSearch 1.x container on the same Docker network as the Flink containers.
  @TestExternalSystem
  DefaultContainerizedExternalSystem<OpensearchContainer> opensearch =
      DefaultContainerizedExternalSystem.builder()
          .fromContainer(
              new OpensearchContainer(
                      DockerImageName.parse(DockerImageVersions.OPENSEARCH_1))
                  .withEnv(
                      "cluster.routing.allocation.disk.threshold_enabled",
                      "false")
                  .withNetworkAliases("opensearch"))
          .bindWithFlinkContainer(flink.getFlinkContainers().getJobManager())
          .build();

  // Context factory ships the connector + test-utils jars into the job classpath.
  @TestContext
  OpensearchSinkExternalContextFactory contextFactory =
      new OpensearchSinkExternalContextFactory(
          opensearch.getContainer(),
          Arrays.asList(
              ResourceTestUtils.getResource(
                      "dependencies/opensearch-end-to-end-test.jar")
                  .toAbsolutePath()
                  .toUri()
                  .toURL(),
              ResourceTestUtils.getResource(
                      "dependencies/flink-connector-test-utils.jar")
                  .toAbsolutePath()
                  .toUri()
                  .toURL()));

  /**
   * Polls the external system until the sorted records match the sorted test data,
   * retrying up to {@link #READER_RETRY_ATTEMPTS} times with a 5s interval.
   */
  @Override
  protected void checkResultWithSemantic(
      ExternalSystemDataReader<ComparableTuple2<Integer, String>> reader,
      List<ComparableTuple2<Integer, String>> testData,
      CheckpointingMode semantic)
      throws Exception {
    waitUntilCondition(
        () -> {
          try {
            List<ComparableTuple2<Integer, String>> result =
                reader.poll(Duration.ofMillis(READER_TIMEOUT));
            // Order of arrival is not deterministic, so compare sorted views.
            assertThat(sort(result).iterator())
                .matchesRecordsFromSource(
                    Collections.singletonList(sort(testData)), semantic);
            return true;
          } catch (Throwable t) {
            LOG.warn("Polled results not as expected", t);
            return false;
          }
        },
        5000,
        READER_RETRY_ATTEMPTS);
  }

  /** Sorts {@code list} in place and returns it. */
  private static <T extends Comparable<T>> List<T> sort(List<T> list) {
    Collections.sort(list);
    return list;
  }
}
| 2,878 |
0 | Create_ds/flink-connector-opensearch/flink-connector-opensearch-e2e-tests/src/test/java/org/apache/flink/streaming | Create_ds/flink-connector-opensearch/flink-connector-opensearch-e2e-tests/src/test/java/org/apache/flink/streaming/tests/OpensearchDataReader.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.tests;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.connector.testframe.external.ExternalSystemDataReader;
import java.time.Duration;
import java.util.List;
import static org.apache.flink.util.Preconditions.checkNotNull;
/** Opensearch data reader. */
public class OpensearchDataReader implements ExternalSystemDataReader<Tuple2<Integer, String>> {

    /** Client used to talk to the Opensearch cluster. */
    private final OpensearchTestClient testClient;

    /** Name of the index the sink under test writes into. */
    private final String index;

    /** Maximum number of documents fetched by a single poll. */
    private final int pageSize;

    public OpensearchDataReader(OpensearchTestClient client, String indexName, int pageLength) {
        this.testClient = checkNotNull(client);
        this.index = checkNotNull(indexName);
        this.pageSize = pageLength;
    }

    /**
     * Forces an index refresh so pending writes become searchable, then fetches up to the
     * configured page size of documents ordered by their {@code key} field.
     */
    @Override
    public List<Tuple2<Integer, String>> poll(Duration timeout) {
        testClient.refreshIndex(index);
        return testClient.fetchAll(index, "key", 0, pageSize, true);
    }

    @Override
    public void close() throws Exception {
        testClient.close();
    }
}
| 2,879 |
0 | Create_ds/flink-connector-opensearch/flink-connector-opensearch-e2e-tests/src/test/java/org/apache/flink/streaming | Create_ds/flink-connector-opensearch/flink-connector-opensearch-e2e-tests/src/test/java/org/apache/flink/streaming/tests/OpensearchTestClient.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.tests;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.http.HttpHost;
import org.opensearch.OpenSearchException;
import org.opensearch.action.admin.indices.delete.DeleteIndexRequest;
import org.opensearch.action.admin.indices.refresh.RefreshRequest;
import org.opensearch.action.search.SearchRequest;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.client.RequestOptions;
import org.opensearch.client.RestClient;
import org.opensearch.client.RestHighLevelClient;
import org.opensearch.client.indices.CreateIndexRequest;
import org.opensearch.client.indices.GetIndexRequest;
import org.opensearch.common.settings.Settings;
import org.opensearch.rest.RestStatus;
import org.opensearch.search.SearchHit;
import org.opensearch.search.builder.SearchSourceBuilder;
import org.opensearch.search.sort.SortOrder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import static org.apache.flink.util.Preconditions.checkNotNull;
/** The type Opensearch test client. */
public class OpensearchTestClient implements AutoCloseable {
    private static final Logger LOG = LoggerFactory.getLogger(OpensearchTestClient.class);

    private final RestHighLevelClient restClient;

    /**
     * Instantiates a new Opensearch client.
     *
     * @param address The address to access Opensearch from the host machine (outside of the
     *     containerized environment).
     */
    public OpensearchTestClient(String address) {
        checkNotNull(address);
        this.restClient = new RestHighLevelClient(RestClient.builder(HttpHost.create(address)));
    }

    /** Deletes the given index; I/O failures are logged, not propagated (best-effort cleanup). */
    public void deleteIndex(String indexName) {
        DeleteIndexRequest request = new DeleteIndexRequest(indexName);
        try {
            restClient.indices().delete(request, RequestOptions.DEFAULT);
        } catch (IOException e) {
            LOG.error("Cannot delete index {}", indexName, e);
        }
        // This is needed to avoid race conditions between tests that reuse the same index
        refreshIndex(indexName);
    }

    /** Forces a refresh so that previously indexed documents become visible to searches. */
    public void refreshIndex(String indexName) {
        RefreshRequest refresh = new RefreshRequest(indexName);
        try {
            restClient.indices().refresh(refresh, RequestOptions.DEFAULT);
        } catch (IOException e) {
            // Fixed copy-paste: previously logged "Cannot delete index" here.
            LOG.error("Cannot refresh index {}", indexName, e);
        } catch (OpenSearchException e) {
            if (e.status() == RestStatus.NOT_FOUND) {
                // A missing index is expected when refreshing ahead of the first write.
                LOG.info("Index {} not found", indexName);
            } else {
                // Don't silently swallow unexpected server-side failures.
                LOG.error("Cannot refresh index {}", indexName, e);
            }
        }
    }

    /**
     * Creates the index with the given shard/replica settings unless it already exists. Failures
     * are logged, not propagated.
     */
    public void createIndexIfDoesNotExist(String indexName, int shards, int replicas) {
        GetIndexRequest request = new GetIndexRequest(indexName);
        CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName);
        createIndexRequest.settings(
                Settings.builder()
                        .put("index.number_of_shards", shards)
                        .put("index.number_of_replicas", replicas));
        try {
            boolean exists = restClient.indices().exists(request, RequestOptions.DEFAULT);
            if (!exists) {
                restClient.indices().create(createIndexRequest, RequestOptions.DEFAULT);
            } else {
                LOG.info("Index already exists {}", indexName);
            }
        } catch (IOException e) {
            LOG.error("Cannot create index {}", indexName, e);
        }
    }

    @Override
    public void close() throws Exception {
        restClient.close();
    }

    /**
     * Fetches one page of documents sorted ascending by {@code sortField} and maps each hit to
     * (id, source["value"]). Returns an empty list if the search fails with an I/O error.
     *
     * @param indexName index to query
     * @param sortField field to sort on
     * @param from offset of the first hit
     * @param pageLength maximum number of hits to return
     * @param trackTotalHits whether the response should compute the exact total hit count
     */
    public List<Tuple2<Integer, String>> fetchAll(
            String indexName, String sortField, int from, int pageLength, boolean trackTotalHits) {
        try {
            SearchResponse response =
                    restClient.search(
                            new SearchRequest(indexName)
                                    .source(
                                            new SearchSourceBuilder()
                                                    .sort(sortField, SortOrder.ASC)
                                                    .from(from)
                                                    .size(pageLength)
                                                    .trackTotalHits(trackTotalHits)),
                            RequestOptions.DEFAULT);
            SearchHit[] searchHits = response.getHits().getHits();
            return Arrays.stream(searchHits)
                    .map(
                            searchHit ->
                                    ComparableTuple2.of(
                                            Integer.valueOf(searchHit.getId()),
                                            searchHit.getSourceAsMap().get("value").toString()))
                    .collect(Collectors.toList());
        } catch (IOException e) {
            LOG.error("Fetching records failed", e);
            return Collections.emptyList();
        }
    }
}
| 2,880 |
0 | Create_ds/flink-connector-opensearch/flink-connector-opensearch-e2e-tests/src/test/java/org/apache/flink/streaming | Create_ds/flink-connector-opensearch/flink-connector-opensearch-e2e-tests/src/test/java/org/apache/flink/streaming/tests/OpensearchSinkExternalContextFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.tests;
import org.apache.flink.connector.testframe.external.ExternalContextFactory;
import org.opensearch.testcontainers.OpensearchContainer;
import java.net.URL;
import java.util.List;
import static org.apache.flink.util.Preconditions.checkNotNull;
/** Opensearch sink external context factory. */
class OpensearchSinkExternalContextFactory
        implements ExternalContextFactory<OpensearchSinkExternalContext> {

    /** Container backing every created context. */
    private final OpensearchContainer container;

    /** Connector jars shipped with the test job. */
    private final List<URL> jars;

    /**
     * Instantiates a new Opensearch sink external context factory.
     *
     * @param opensearchContainer The Opensearch container.
     * @param connectorJars The connector jars.
     */
    OpensearchSinkExternalContextFactory(
            OpensearchContainer opensearchContainer, List<URL> connectorJars) {
        this.container = checkNotNull(opensearchContainer);
        this.jars = checkNotNull(connectorJars);
    }

    @Override
    public OpensearchSinkExternalContext createExternalContext(String testName) {
        // Internal address: network alias + exposed port, resolvable from inside the
        // containerized environment (i.e. from Flink).
        final String internalAddress =
                container.getNetworkAliases().get(0) + ":" + container.getExposedPorts().get(0);
        return new OpensearchSinkExternalContext(
                container.getHttpHostAddress(), internalAddress, jars);
    }
}
| 2,881 |
0 | Create_ds/flink-connector-opensearch/flink-connector-opensearch-e2e-tests/src/test/java/org/apache/flink/streaming | Create_ds/flink-connector-opensearch/flink-connector-opensearch-e2e-tests/src/test/java/org/apache/flink/streaming/tests/OpensearchSinkExternalContext.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.tests;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.connector.sink2.Sink;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.connector.opensearch.sink.OpensearchSinkBuilder;
import org.apache.flink.connector.testframe.external.ExternalSystemDataReader;
import org.apache.flink.connector.testframe.external.sink.DataStreamSinkV2ExternalContext;
import org.apache.flink.connector.testframe.external.sink.TestingSinkSettings;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.http.HttpHost;
import java.net.URL;
import java.util.List;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static org.apache.flink.util.Preconditions.checkNotNull;
class OpensearchSinkExternalContext
        implements DataStreamSinkV2ExternalContext<Tuple2<Integer, String>> {
    /** The constant INDEX_NAME_PREFIX. */
    private static final String INDEX_NAME_PREFIX = "os-index";

    /** Upper bound (exclusive) on the length of generated string values. */
    private static final int RANDOM_STRING_MAX_LENGTH = 50;

    private static final int NUM_RECORDS_UPPER_BOUND = 500;
    private static final int NUM_RECORDS_LOWER_BOUND = 100;

    /** Number of actions buffered before the sink flushes a bulk request. */
    private static final int BULK_BUFFER = 100;

    /** Page size big enough to fetch every generated record in one poll. */
    private static final int PAGE_LENGTH = NUM_RECORDS_UPPER_BOUND + 1;

    /** The index name. */
    private final String indexName;

    /** The address reachable from Flink (internal to the testing environment). */
    private final String addressInternal;

    /** The connector jar paths. */
    private final List<URL> connectorJarPaths;

    /** The client. */
    private final OpensearchTestClient client;

    /**
     * Instantiates a new Opensearch sink context base.
     *
     * @param addressExternal The address to access Opensearch from the host machine (outside of the
     *     containerized environment).
     * @param addressInternal The address to access Opensearch from Flink. When running in a
     *     containerized environment, should correspond to the network alias that resolves within
     *     the environment's network together with the exposed port.
     * @param connectorJarPaths The connector jar paths.
     */
    OpensearchSinkExternalContext(
            String addressExternal, String addressInternal, List<URL> connectorJarPaths) {
        this.addressInternal = checkNotNull(addressInternal);
        this.connectorJarPaths = checkNotNull(connectorJarPaths);
        this.client = new OpensearchTestClient(addressExternal);
        // Randomized suffix so concurrent/repeated test runs never share an index.
        this.indexName =
                INDEX_NAME_PREFIX + "-" + ThreadLocalRandom.current().nextLong(Long.MAX_VALUE);
    }

    /** Creates the target index (1 shard, 0 replicas) and builds the sink under test. */
    @Override
    public Sink<Tuple2<Integer, String>> createSink(TestingSinkSettings sinkSettings)
            throws UnsupportedOperationException {
        client.createIndexIfDoesNotExist(indexName, 1, 0);
        return new OpensearchSinkBuilder<Tuple2<Integer, String>>()
                .setHosts(HttpHost.create(addressInternal))
                .setEmitter(new OpensearchTestEmitter(indexName))
                .setBulkFlushMaxActions(BULK_BUFFER)
                .build();
    }

    @Override
    public ExternalSystemDataReader<Tuple2<Integer, String>> createSinkDataReader(
            TestingSinkSettings sinkSettings) {
        return new OpensearchDataReader(client, indexName, PAGE_LENGTH);
    }

    /**
     * Generates between {@code NUM_RECORDS_LOWER_BOUND} and {@code NUM_RECORDS_UPPER_BOUND}
     * records, fully determined by {@code seed} so a run can be reproduced.
     */
    @Override
    public List<Tuple2<Integer, String>> generateTestData(
            TestingSinkSettings sinkSettings, long seed) {
        Random random = new Random(seed);
        int recordNum =
                random.nextInt(NUM_RECORDS_UPPER_BOUND - NUM_RECORDS_LOWER_BOUND)
                        + NUM_RECORDS_LOWER_BOUND;
        return IntStream.range(0, recordNum)
                .boxed()
                .map(
                        i -> {
                            int valueLength = random.nextInt(RANDOM_STRING_MAX_LENGTH) + 1;
                            // Pass the seeded Random explicitly: the shorter
                            // RandomStringUtils.random(count, letters, numbers) overload uses a
                            // shared internal RNG, which made the data non-reproducible from
                            // the seed.
                            String value =
                                    RandomStringUtils.random(
                                            valueLength, 0, 0, true, true, null, random);
                            return ComparableTuple2.of(i, value);
                        })
                .collect(Collectors.toList());
    }

    /** Drops the test index (best effort). */
    @Override
    public void close() throws Exception {
        client.deleteIndex(indexName);
    }

    @Override
    public List<URL> getConnectorJarPaths() {
        return connectorJarPaths;
    }

    @Override
    public TypeInformation<Tuple2<Integer, String>> getProducedType() {
        return TypeInformation.of(new TypeHint<Tuple2<Integer, String>>() {});
    }

    @Override
    public String toString() {
        return "Opensearch sink context.";
    }
}
| 2,882 |
0 | Create_ds/flink-connector-opensearch/flink-connector-opensearch-e2e-tests/src/main/java/org/apache/flink/streaming | Create_ds/flink-connector-opensearch/flink-connector-opensearch-e2e-tests/src/main/java/org/apache/flink/streaming/tests/OpensearchTestEmitter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.tests;
import org.apache.flink.api.connector.sink2.SinkWriter;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.connector.opensearch.sink.OpensearchEmitter;
import org.apache.flink.connector.opensearch.sink.RequestIndexer;
import org.opensearch.action.update.UpdateRequest;
import java.util.HashMap;
import java.util.Map;
/** Test emitter for performing Opensearch indexing requests. */
public class OpensearchTestEmitter implements OpensearchEmitter<Tuple2<Integer, String>> {
    private static final long serialVersionUID = 1L;

    /** Target index every upsert is routed to. */
    private final String indexName;

    OpensearchTestEmitter(String indexName) {
        this.indexName = indexName;
    }

    /** Turns the tuple into an upsert document keyed by {@code element.f0}. */
    @Override
    public void emit(
            Tuple2<Integer, String> element, SinkWriter.Context context, RequestIndexer indexer) {
        final Map<String, Object> document = new HashMap<>();
        document.put("key", element.f0);
        document.put("value", element.f1);
        indexer.add(
                new UpdateRequest(indexName, String.valueOf(element.f0))
                        .doc(document)
                        .upsert(document));
    }
}
| 2,883 |
0 | Create_ds/flink-connector-opensearch/flink-connector-opensearch-e2e-tests/src/main/java/org/apache/flink/streaming | Create_ds/flink-connector-opensearch/flink-connector-opensearch-e2e-tests/src/main/java/org/apache/flink/streaming/tests/ComparableTuple2.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.tests;
import org.apache.flink.api.java.tuple.Tuple2;
/** Variation of the {@link Tuple2} which implements {@link Comparable}. */
public class ComparableTuple2<T0 extends Comparable<? super T0>, T1 extends Comparable<? super T1>>
        extends Tuple2<T0, T1> implements Comparable<ComparableTuple2<T0, T1>> {
    private static final long serialVersionUID = 1L;

    public ComparableTuple2(T0 f0, T1 f1) {
        super(f0, f1);
    }

    /** Orders lexicographically: by {@code f0} first, falling back to {@code f1} on ties. */
    @Override
    public int compareTo(ComparableTuple2<T0, T1> other) {
        final int byFirst = this.f0.compareTo(other.f0);
        return byFirst != 0 ? byFirst : this.f1.compareTo(other.f1);
    }

    /** Creates a new key-value pair. */
    public static <K extends Comparable<? super K>, T1 extends Comparable<? super T1>>
            ComparableTuple2<K, T1> of(K key, T1 value) {
        return new ComparableTuple2<>(key, value);
    }
}
| 2,884 |
0 | Create_ds/flink-connector-opensearch/flink-connector-opensearch-e2e-tests/src/main/java/org/apache/flink/streaming | Create_ds/flink-connector-opensearch/flink-connector-opensearch-e2e-tests/src/main/java/org/apache/flink/streaming/tests/OpensearchSinkExample.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.tests;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.opensearch.ActionRequestFailureHandler;
import org.apache.flink.streaming.connectors.opensearch.OpensearchSink;
import org.apache.flink.streaming.connectors.opensearch.RequestIndexer;
import org.apache.flink.util.Collector;
import org.apache.http.HttpHost;
import org.opensearch.action.ActionRequest;
import org.opensearch.action.index.IndexRequest;
import org.opensearch.action.update.UpdateRequest;
import org.opensearch.client.Requests;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/** End to end test for OpensearchSink. */
public class OpensearchSinkExample {

    /**
     * Runs a checkpointed streaming job that writes each generated record to Opensearch twice
     * (an index request plus an update/upsert request).
     *
     * <p>Required arguments: {@code --numRecords <numRecords> --index <index>}.
     */
    public static void main(String[] args) throws Exception {
        final ParameterTool parameterTool = ParameterTool.fromArgs(args);
        if (parameterTool.getNumberOfParameters() < 2) {
            System.out.println(
                    "Missing parameters!\n" + "Usage: --numRecords <numRecords> --index <index>");
            return;
        }
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(5000);
        // Each sequence element fans out to two updates for the same key, so the sink's
        // upsert path is exercised as well as the plain index path.
        DataStream<Tuple2<String, String>> source =
                env.generateSequence(0, parameterTool.getInt("numRecords") - 1)
                        .flatMap(
                                new FlatMapFunction<Long, Tuple2<String, String>>() {
                                    @Override
                                    public void flatMap(
                                            Long value, Collector<Tuple2<String, String>> out) {
                                        final String key = String.valueOf(value);
                                        final String message = "message #" + value;
                                        out.collect(Tuple2.of(key, message + "update #1"));
                                        out.collect(Tuple2.of(key, message + "update #2"));
                                    }
                                });
        // NOTE(review): host is hard-coded; presumably the e2e harness runs Opensearch on
        // localhost:9200 — confirm against the test setup.
        List<HttpHost> httpHosts = new ArrayList<>();
        httpHosts.add(new HttpHost("127.0.0.1", 9200, "http"));
        OpensearchSink.Builder<Tuple2<String, String>> osSinkBuilder =
                new OpensearchSink.Builder<>(
                        httpHosts,
                        (Tuple2<String, String> element,
                                RuntimeContext ctx,
                                RequestIndexer indexer) -> {
                            indexer.add(createIndexRequest(element.f1, parameterTool));
                            indexer.add(createUpdateRequest(element, parameterTool));
                        });
        osSinkBuilder.setFailureHandler(
                new CustomFailureHandler(parameterTool.getRequired("index")));
        // this instructs the sink to emit after every element, otherwise they would be buffered
        osSinkBuilder.setBulkFlushMaxActions(1);
        source.addSink(osSinkBuilder.build());
        env.execute("Opensearch end to end sink test example");
    }

    /**
     * Failure handler that retries failed {@link IndexRequest}s against the configured (valid)
     * index, re-wrapping the original source under a "data" field. Any other failed action type
     * is treated as a test error.
     */
    private static class CustomFailureHandler implements ActionRequestFailureHandler {
        private static final long serialVersionUID = 942269087742453482L;

        /** The valid index to redirect failed index requests to. */
        private final String index;

        CustomFailureHandler(String index) {
            this.index = index;
        }

        @Override
        public void onFailure(
                ActionRequest action, Throwable failure, int restStatusCode, RequestIndexer indexer)
                throws Throwable {
            if (action instanceof IndexRequest) {
                Map<String, Object> json = new HashMap<>();
                json.put("data", ((IndexRequest) action).source());
                indexer.add(
                        Requests.indexRequest()
                                .index(index)
                                .id(((IndexRequest) action).id())
                                .source(json));
            } else {
                throw new IllegalStateException("unexpected");
            }
        }
    }

    /**
     * Builds an index request for the element. Elements whose message counter starts with
     * "message #15" are deliberately routed to an invalid index name so the failure-handler
     * path is exercised by the e2e test.
     */
    private static IndexRequest createIndexRequest(String element, ParameterTool parameterTool) {
        Map<String, Object> json = new HashMap<>();
        json.put("data", element);
        String index;
        if (element.startsWith("message #15")) {
            index = ":intentional invalid index:";
        } else {
            index = parameterTool.getRequired("index");
        }
        return Requests.indexRequest().index(index).id(element).source(json);
    }

    /** Builds an upsert keyed by {@code element.f0} carrying the message under "data". */
    private static UpdateRequest createUpdateRequest(
            Tuple2<String, String> element, ParameterTool parameterTool) {
        Map<String, Object> json = new HashMap<>();
        json.put("data", element.f1);
        return new UpdateRequest(parameterTool.getRequired("index"), element.f0)
                .doc(json)
                .upsert(json);
    }
}
| 2,885 |
0 | Create_ds/flink-connector-opensearch/flink-connector-opensearch/src/test/java/org/apache/flink/streaming/connectors | Create_ds/flink-connector-opensearch/flink-connector-opensearch/src/test/java/org/apache/flink/streaming/connectors/opensearch/OpensearchSinkTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.connectors.opensearch;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.testutils.CheckedThread;
import org.apache.flink.streaming.api.operators.StreamSink;
import org.apache.flink.streaming.connectors.opensearch.util.NoOpFailureHandler;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.streaming.util.MockStreamingRuntimeContext;
import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness;
import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.entity.ContentType;
import org.apache.http.impl.bootstrap.HttpServer;
import org.apache.http.impl.bootstrap.ServerBootstrap;
import org.apache.http.protocol.HttpRequestHandlerMapper;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.opensearch.action.ActionRequest;
import org.opensearch.action.DocWriteRequest.OpType;
import org.opensearch.action.bulk.BulkItemResponse;
import org.opensearch.action.bulk.BulkItemResponse.Failure;
import org.opensearch.action.bulk.BulkResponse;
import org.opensearch.action.index.IndexResponse;
import org.opensearch.client.Requests;
import org.opensearch.common.xcontent.ToXContent;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.json.JsonXContent;
import org.opensearch.index.shard.ShardId;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.Deque;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentLinkedDeque;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Consumer;
import java.util.function.Supplier;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
/** Suite of tests for {@link OpensearchSink}. */
public class OpensearchSinkTest {
private HttpServer server;
private final Deque<Consumer<HttpResponse>> responses = new ConcurrentLinkedDeque<>();
private final Lock lock = new ReentrantLock();
private final Condition flushed = lock.newCondition();
    /**
     * Starts a local HTTP server impersonating an Opensearch node: HEAD requests (the client's
     * connectivity probes) always get a 200, while POST requests (bulk indexing) are answered
     * with the next canned response from {@code responses}. Serving a bulk response also signals
     * {@code flushed} so tests can await a completed flush.
     */
    @BeforeEach
    public void setUp() throws IOException {
        final HttpRequestHandlerMapper handlers =
                (request) -> {
                    final String method = request.getRequestLine().getMethod();
                    if (method.equalsIgnoreCase("HEAD")) {
                        // Connection request always OKed
                        return (req, resp, context) -> resp.setStatusCode(200);
                    } else if (method.equalsIgnoreCase("POST")) {
                        // Bulk responses are configured per test case
                        return (req, resp, context) -> {
                            lock.lock();
                            try {
                                // Each POST consumes exactly one queued response, in order.
                                responses.poll().accept(resp);
                                flushed.signalAll();
                            } finally {
                                lock.unlock();
                            }
                        };
                    } else {
                        // Any other HTTP method is unexpected in these tests.
                        return null;
                    }
                };
        server = ServerBootstrap.bootstrap().setHandlerMapper(handlers).create();
        server.start();
    }
    /** Stops the fake Opensearch HTTP server and discards any unconsumed canned responses. */
    @AfterEach
    public void tearDown() {
        server.stop();
        server = null;
        responses.clear();
    }
    /**
     * Tests that any item failure in the listener callbacks is rethrown on an immediately following
     * invoke call.
     */
    @Test
    public void testItemFailureRethrownOnInvoke() throws Throwable {
        final OpensearchSink.Builder<String> builder =
                new OpensearchSink.Builder<>(
                        Arrays.asList(new HttpHost("localhost", server.getLocalPort())),
                        new SimpleSinkFunction<String>());
        // Flush after every element so each processElement triggers one bulk request.
        builder.setBulkFlushMaxActions(1);
        builder.setFailureHandler(new NoOpFailureHandler());
        final OpensearchSink<String> sink = builder.build();
        final OneInputStreamOperatorTestHarness<String, Object> testHarness =
                new OneInputStreamOperatorTestHarness<>(new StreamSink<>(sink));
        // Queue a bulk response whose single item reports a failure.
        responses.add(
                createResponse(
                        new BulkItemResponse(
                                1,
                                OpType.INDEX,
                                new Failure(
                                        "test",
                                        "_doc",
                                        "1",
                                        new Exception("artificial failure for record")))));
        testHarness.open();
        // setup the next bulk request, and its mock item failures
        testHarness.processElement(new StreamRecord<>("msg"));
        // The async item failure must surface as the cause of the next invoke.
        assertThatThrownBy(() -> testHarness.processElement(new StreamRecord<>("next msg")))
                .getCause()
                .hasMessageContaining("artificial failure for record");
    }
    /**
     * Tests that any item failure in the listener callbacks is rethrown on an immediately following
     * checkpoint.
     */
    @Test
    public void testItemFailureRethrownOnCheckpoint() throws Throwable {
        final OpensearchSink.Builder<String> builder =
                new OpensearchSink.Builder<>(
                        Arrays.asList(new HttpHost("localhost", server.getLocalPort())),
                        new SimpleSinkFunction<String>());
        // Flush after every element so the single record's failure is pending at snapshot time.
        builder.setBulkFlushMaxActions(1);
        builder.setFailureHandler(new NoOpFailureHandler());
        final OpensearchSink<String> sink = builder.build();
        final OneInputStreamOperatorTestHarness<String, Object> testHarness =
                new OneInputStreamOperatorTestHarness<>(new StreamSink<>(sink));
        testHarness.open();
        // Queue a bulk response whose single item reports a failure.
        responses.add(
                createResponse(
                        new BulkItemResponse(
                                1,
                                OpType.INDEX,
                                new Failure(
                                        "test",
                                        "_doc",
                                        "1",
                                        new Exception("artificial failure for record")))));
        testHarness.processElement(new StreamRecord<>("msg"));
        // The pending async failure must be rethrown (wrapped twice) by the snapshot.
        assertThatThrownBy(() -> testHarness.snapshot(1L, 1000L))
                .getCause()
                .getCause()
                .hasMessageContaining("artificial failure for record");
    }
    /**
     * Tests that any item failure in the listener callbacks due to flushing on an immediately
     * following checkpoint is rethrown; we set a timeout because the test will not finish if the
     * logic is broken.
     */
    @Test
    @Timeout(5)
    public void testItemFailureRethrownOnCheckpointAfterFlush() throws Throwable {
        final OpensearchSink.Builder<String> builder =
                new OpensearchSink.Builder<>(
                        Arrays.asList(new HttpHost("localhost", server.getLocalPort())),
                        new SimpleSinkFunction<String>());
        // Flush on a timer (no max-actions limit) so records accumulate between flushes.
        builder.setBulkFlushInterval(1000);
        builder.setFailureHandler(new NoOpFailureHandler());
        final OpensearchSink<String> sink = builder.build();
        final OneInputStreamOperatorTestHarness<String, Object> testHarness =
                new OneInputStreamOperatorTestHarness<>(new StreamSink<>(sink));
        testHarness.open();
        // First bulk (msg-1) succeeds ...
        responses.add(
                createResponse(
                        new BulkItemResponse(
                                1,
                                OpType.INDEX,
                                new IndexResponse(
                                        new ShardId("test", "-", 0), "_doc", "1", 0, 0, 1, true))));
        // ... second bulk (msg-2 + msg-3, flushed by the snapshot) reports an item failure.
        responses.add(
                createResponse(
                        new BulkItemResponse(
                                2,
                                OpType.INDEX,
                                new Failure(
                                        "test",
                                        "_doc",
                                        "2",
                                        new Exception("artificial failure for record")))));
        testHarness.processElement(new StreamRecord<>("msg-1"));
        // Await for flush to be complete
        awaitForFlushToFinish();
        // setup the requests to be flushed in the snapshot
        testHarness.processElement(new StreamRecord<>("msg-2"));
        // let the snapshot-triggered flush continue (2 records in the bulk, so the 2nd one should
        // fail)
        testHarness.processElement(new StreamRecord<>("msg-3"));
        // Run the snapshot on a separate thread: it blocks until the pending bulk is flushed.
        CheckedThread snapshotThread =
                new CheckedThread() {
                    @Override
                    public void go() throws Exception {
                        testHarness.snapshot(1L, 1000L);
                    }
                };
        snapshotThread.start();
        // Await for flush to be complete
        awaitForFlushToFinish();
        assertThatThrownBy(snapshotThread::sync)
                .getCause()
                .getCause()
                .hasMessageContaining("artificial failure for record");
    }
/**
* Tests that any bulk failure in the listener callbacks is rethrown on an immediately following
* invoke call.
*/
@Test
public void testBulkFailureRethrownOnInvoke() throws Throwable {
final OpensearchSink.Builder<String> builder =
new OpensearchSink.Builder<>(
Arrays.asList(new HttpHost("localhost", server.getLocalPort())),
new SimpleSinkFunction<String>());
builder.setBulkFlushMaxActions(1);
builder.setFailureHandler(new NoOpFailureHandler());
final OpensearchSink<String> sink = builder.build();
final OneInputStreamOperatorTestHarness<String, Object> testHarness =
new OneInputStreamOperatorTestHarness<>(new StreamSink<>(sink));
testHarness.open();
// Let the whole bulk request fail
responses.add(response -> response.setStatusCode(500));
testHarness.processElement(new StreamRecord<>("msg"));
assertThatThrownBy(() -> testHarness.processElement(new StreamRecord<>("next msg")))
.getCause()
.hasMessageContaining("Unable to parse response body");
}
/**
* Tests that any bulk failure in the listener callbacks is rethrown on an immediately following
* checkpoint.
*/
@Test
public void testBulkFailureRethrownOnCheckpoint() throws Throwable {
final OpensearchSink.Builder<String> builder =
new OpensearchSink.Builder<>(
Arrays.asList(new HttpHost("localhost", server.getLocalPort())),
new SimpleSinkFunction<String>());
builder.setBulkFlushMaxActions(1);
builder.setFailureHandler(new NoOpFailureHandler());
final OpensearchSink<String> sink = builder.build();
final OneInputStreamOperatorTestHarness<String, Object> testHarness =
new OneInputStreamOperatorTestHarness<>(new StreamSink<>(sink));
testHarness.open();
// Let the whole bulk request fail
responses.add(response -> response.setStatusCode(500));
testHarness.processElement(new StreamRecord<>("msg"));
assertThatThrownBy(() -> testHarness.snapshot(1L, 1000L))
.getCause()
.getCause()
.hasMessageContaining("Unable to parse response body");
}
/**
* Tests that any bulk failure in the listener callbacks due to flushing on an immediately
* following checkpoint is rethrown; we set a timeout because the test will not finish if the
* logic is broken.
*/
@Test
@Timeout(5)
public void testBulkFailureRethrownOnOnCheckpointAfterFlush() throws Throwable {
final OpensearchSink.Builder<String> builder =
new OpensearchSink.Builder<>(
Arrays.asList(new HttpHost("localhost", server.getLocalPort())),
new SimpleSinkFunction<String>());
builder.setBulkFlushInterval(1000);
builder.setFailureHandler(new NoOpFailureHandler());
final OpensearchSink<String> sink = builder.build();
final OneInputStreamOperatorTestHarness<String, Object> testHarness =
new OneInputStreamOperatorTestHarness<>(new StreamSink<>(sink));
testHarness.open();
responses.add(
createResponse(
new BulkItemResponse(
1,
OpType.INDEX,
new IndexResponse(
new ShardId("test", "-", 0), "_doc", "1", 0, 0, 1, true))));
// Let the whole bulk request fail
responses.add(response -> response.setStatusCode(500));
// setup the next bulk request, and let bulk request succeed
testHarness.processElement(new StreamRecord<>("msg-1"));
// Await for flush to be complete
awaitForFlushToFinish();
// setup the requests to be flushed in the snapshot
testHarness.processElement(new StreamRecord<>("msg-2"));
testHarness.processElement(new StreamRecord<>("msg-3"));
CheckedThread snapshotThread =
new CheckedThread() {
@Override
public void go() throws Exception {
testHarness.snapshot(1L, 1000L);
}
};
snapshotThread.start();
// Await for flush to be complete
awaitForFlushToFinish();
assertThatThrownBy(snapshotThread::sync)
.getCause()
.getCause()
.hasMessageContaining("Unable to parse response body");
}
    /**
     * Tests that the sink waits for all pending requests — including requests that a failure
     * handler re-adds — before a checkpoint completes. A timeout guards the test: if the
     * at-least-once logic is broken, the snapshot thread would block forever.
     */
    @Test
    @Timeout(5)
    public void testAtLeastOnceSink() throws Throwable {
        final OpensearchSink.Builder<String> builder =
                new OpensearchSink.Builder<>(
                        Arrays.asList(new HttpHost("localhost", server.getLocalPort())),
                        new SimpleSinkFunction<String>());
        builder.setBulkFlushInterval(1000);
        // use a failure handler that simply re-adds requests
        builder.setFailureHandler(new DummyRetryFailureHandler());
        final OpensearchSink<String> sink = builder.build();
        final OneInputStreamOperatorTestHarness<String, Object> testHarness =
                new OneInputStreamOperatorTestHarness<>(new StreamSink<>(sink));
        testHarness.open();
        // First bulk response: the single request fails, so the retry handler re-adds it.
        responses.add(
                createResponse(
                        new BulkItemResponse(
                                1,
                                OpType.INDEX,
                                new Failure(
                                        "test",
                                        "_doc",
                                        "1",
                                        new Exception("artificial failure for record")))));
        // Second bulk response: the re-added request succeeds.
        responses.add(
                createResponse(
                        new BulkItemResponse(
                                2,
                                OpType.INDEX,
                                new IndexResponse(
                                        new ShardId("test", "-", 0), "_doc", "2", 0, 0, 1, true))));
        testHarness.processElement(new StreamRecord<>("msg"));
        // One request is pending: the buffered record (and after the first flush fails and the
        // handler re-adds it, the count stays at 1).
        assertThat(sink.getNumPendingRequests()).isEqualTo(1);
        CheckedThread snapshotThread =
                new CheckedThread() {
                    @Override
                    public void go() throws Exception {
                        testHarness.snapshot(1L, 1000L);
                    }
                };
        snapshotThread.start();
        // Wait for the first (failing) flush to complete.
        awaitForFlushToFinish();
        // The failure handler re-added the request, so the snapshot must still be blocked.
        // The failureRequestIndexer is only drained on the next bulk flush interval, so poll
        // until numPendingRequests reflects the re-add.
        awaitForCondition(() -> sink.getNumPendingRequests() == 1);
        // Wait for the second (successful) flush to complete.
        awaitForFlushToFinish();
        // With all pending requests resolved, the snapshot must finish without exceptions.
        snapshotThread.sync();
        testHarness.close();
    }
/**
* This test is meant to assure that testAtLeastOnceSink is valid by testing that if flushing is
* disabled, the snapshot method does indeed finishes without waiting for pending requests; we
* set a timeout because the test will not finish if the logic is broken.
*/
@Test
@Timeout(5)
public void testDoesNotWaitForPendingRequestsIfFlushingDisabled() throws Exception {
final OpensearchSink.Builder<String> builder =
new OpensearchSink.Builder<>(
Arrays.asList(new HttpHost("localhost", server.getLocalPort())),
new SimpleSinkFunction<String>());
final OpensearchSink<String> sink = builder.build();
sink.disableFlushOnCheckpoint(); // disable flushing
final OneInputStreamOperatorTestHarness<String, Object> testHarness =
new OneInputStreamOperatorTestHarness<>(new StreamSink<>(sink));
testHarness.open();
responses.add(
createResponse(
new BulkItemResponse(
1,
OpType.INDEX,
new Failure(
"test",
"_doc",
"1",
new Exception("artificial failure for record")))));
testHarness.processElement(new StreamRecord<>("msg-1"));
// the snapshot should not block even though we haven't flushed the bulk request
testHarness.snapshot(1L, 1000L);
assertThatThrownBy(() -> testHarness.close())
.getCause()
.hasMessageContaining("artificial failure for record");
}
@Test
public void testOpenAndCloseInSinkFunction() throws Exception {
final SimpleClosableSinkFunction<String> sinkFunction = new SimpleClosableSinkFunction<>();
final OpensearchSink.Builder<String> builder =
new OpensearchSink.Builder<>(
Arrays.asList(new HttpHost("localhost", server.getLocalPort())),
sinkFunction);
builder.setFailureHandler(new DummyRetryFailureHandler());
final OpensearchSink<String> sink = builder.build();
sink.setRuntimeContext(new MockStreamingRuntimeContext(false, 1, 0));
sink.open(new Configuration());
sink.close();
assertThat(sinkFunction.openCalled).isTrue();
assertThat(sinkFunction.closeCalled).isTrue();
}
private static class SimpleSinkFunction<String> implements OpensearchSinkFunction<String> {
private static final long serialVersionUID = -176739293659135148L;
@Override
public void process(String element, RuntimeContext ctx, RequestIndexer indexer) {
Map<java.lang.String, Object> json = new HashMap<>();
json.put("data", element);
indexer.add(Requests.indexRequest().index("index").type("type").id("id").source(json));
}
}
private static class SimpleClosableSinkFunction<String>
implements OpensearchSinkFunction<String> {
private static final long serialVersionUID = 1872065917794006848L;
private boolean openCalled;
private boolean closeCalled;
@Override
public void open() {
openCalled = true;
}
@Override
public void close() {
closeCalled = true;
}
@Override
public void process(String element, RuntimeContext ctx, RequestIndexer indexer) {}
}
    /** Failure handler that blindly re-adds every failed action, simulating at-least-once retries. */
    private static class DummyRetryFailureHandler implements ActionRequestFailureHandler {
        private static final long serialVersionUID = 5400023700099200745L;
        @Override
        public void onFailure(
                ActionRequest action, Throwable failure, int restStatusCode, RequestIndexer indexer)
                throws Throwable {
            // Re-queue the failed request so the next bulk flush retries it.
            indexer.add(action);
        }
    }
private static Consumer<HttpResponse> createResponse(BulkItemResponse item) {
return response -> {
try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
response.setStatusCode(200);
try (XContentBuilder builder =
new XContentBuilder(JsonXContent.jsonXContent, baos)) {
final BulkResponse bulkResponse =
new BulkResponse(new BulkItemResponse[] {item}, 200);
bulkResponse.toXContent(builder, ToXContent.EMPTY_PARAMS);
}
response.setEntity(
new ByteArrayEntity(baos.toByteArray(), ContentType.APPLICATION_JSON));
} catch (final IOException ex) {
response.setStatusCode(500);
}
};
}
private static void awaitForCondition(Supplier<Boolean> condition) throws InterruptedException {
while (!condition.get()) {
Thread.sleep(1);
}
}
    /**
     * Blocks the calling thread until the test's flush signal fires (the {@code flushed}
     * condition guarded by {@code lock}).
     *
     * <p>NOTE(review): a signal raced before this method acquires the lock would be missed —
     * presumably the mock server only signals after the awaiting side is already parked; confirm
     * against the signalling code, which is outside this view.
     */
    private void awaitForFlushToFinish() throws InterruptedException {
        lock.lock();
        try {
            flushed.await();
        } finally {
            // Always release, even if await() is interrupted.
            lock.unlock();
        }
    }
}
| 2,886 |
0 | Create_ds/flink-connector-opensearch/flink-connector-opensearch/src/test/java/org/apache/flink/streaming/connectors | Create_ds/flink-connector-opensearch/flink-connector-opensearch/src/test/java/org/apache/flink/streaming/connectors/opensearch/OpensearchSinkITCase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.connectors.opensearch;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.connector.opensearch.OpensearchUtil;
import org.apache.flink.connector.opensearch.test.DockerImageVersions;
import org.apache.flink.runtime.JobException;
import org.apache.flink.runtime.client.JobExecutionException;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.opensearch.testutils.SourceSinkDataTestKit;
import org.apache.flink.test.util.AbstractTestBase;
import org.apache.http.HttpHost;
import org.junit.jupiter.api.Test;
import org.opensearch.client.RestHighLevelClient;
import org.opensearch.testcontainers.OpensearchContainer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testcontainers.junit.jupiter.Container;
import org.testcontainers.junit.jupiter.Testcontainers;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.function.Function;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
/** IT cases for the {@link OpensearchSink}, executed against a containerized Opensearch. */
@Testcontainers
public class OpensearchSinkITCase extends AbstractTestBase {
    private static final Logger LOG = LoggerFactory.getLogger(OpensearchSinkITCase.class);
    @Container
    private static final OpensearchContainer OS_CONTAINER =
            OpensearchUtil.createOpensearchContainer(DockerImageVersions.OPENSEARCH_1, LOG);
    /** Round-trips JSON-encoded documents through the sink and verifies them in the cluster. */
    @Test
    public void testOpensearchSink() throws Exception {
        runOpensearchSinkTest(
                "opensearch-sink-test-json-index", SourceSinkDataTestKit::getJsonSinkFunction);
    }
    /** Same round-trip as above, but with SMILE-encoded documents. */
    @Test
    public void testOpensearchSinkWithSmile() throws Exception {
        runOpensearchSinkTest(
                "opensearch-sink-test-smile-index", SourceSinkDataTestKit::getSmileSinkFunction);
    }
    /** A {@code null} host list must be rejected at construction time. */
    @Test
    public void testNullAddresses() {
        assertThatThrownBy(
                        () ->
                                createOpensearchSink(
                                        1, null, SourceSinkDataTestKit.getJsonSinkFunction("test")))
                .isInstanceOfAny(IllegalArgumentException.class, NullPointerException.class);
    }
    /** An empty host list must be rejected at construction time. */
    @Test
    public void testEmptyAddresses() {
        assertThatThrownBy(
                        () ->
                                createOpensearchSink(
                                        1,
                                        Collections.emptyList(),
                                        SourceSinkDataTestKit.getJsonSinkFunction("test")))
                .isInstanceOf(IllegalArgumentException.class);
    }
    /** A job writing to an unreachable cluster must fail rather than hang. */
    @Test
    public void testInvalidOpensearchCluster() throws Exception {
        final StreamExecutionEnvironment environment =
                StreamExecutionEnvironment.getExecutionEnvironment();
        DataStreamSource<Tuple2<Integer, String>> testStream =
                environment.addSource(new SourceSinkDataTestKit.TestDataSourceFunction());
        testStream.addSink(
                createOpensearchSinkForNode(
                        1,
                        SourceSinkDataTestKit.getJsonSinkFunction("test"),
                        "123.123.123.123")); // unroutable address
        assertThatThrownBy(() -> environment.execute("Opensearch Sink Test"))
                .isInstanceOf(JobExecutionException.class)
                .hasCauseInstanceOf(JobException.class);
    }
    /** Builds a sink for the given hosts with the requested bulk-flush threshold. */
    private OpensearchSink<Tuple2<Integer, String>> createOpensearchSink(
            int bulkFlushMaxActions,
            List<HttpHost> httpHosts,
            OpensearchSinkFunction<Tuple2<Integer, String>> opensearchSinkFunction) {
        OpensearchSink.Builder<Tuple2<Integer, String>> sinkBuilder =
                new OpensearchSink.Builder<>(httpHosts, opensearchSinkFunction);
        sinkBuilder.setBulkFlushMaxActions(bulkFlushMaxActions);
        return sinkBuilder.build();
    }
    /** Builds a sink targeting a single node, wired with the container's TLS/auth settings. */
    private OpensearchSink<Tuple2<Integer, String>> createOpensearchSinkForNode(
            int bulkFlushMaxActions,
            OpensearchSinkFunction<Tuple2<Integer, String>> opensearchSinkFunction,
            String hostAddress) {
        ArrayList<HttpHost> hosts = new ArrayList<>();
        hosts.add(HttpHost.create(hostAddress));
        OpensearchSink.Builder<Tuple2<Integer, String>> sinkBuilder =
                new OpensearchSink.Builder<>(hosts, opensearchSinkFunction);
        sinkBuilder.setBulkFlushMaxActions(bulkFlushMaxActions);
        sinkBuilder.setRestClientFactory(OpensearchUtil.createClientFactory(OS_CONTAINER));
        return sinkBuilder.build();
    }
    /** Runs the test pipeline against the container and verifies the produced documents. */
    private void runOpensearchSinkTest(
            String index,
            Function<String, OpensearchSinkFunction<Tuple2<Integer, String>>> functionFactory)
            throws Exception {
        final StreamExecutionEnvironment environment =
                StreamExecutionEnvironment.getExecutionEnvironment();
        DataStreamSource<Tuple2<Integer, String>> testStream =
                environment.addSource(new SourceSinkDataTestKit.TestDataSourceFunction());
        testStream.addSink(
                createOpensearchSinkForNode(
                        1, functionFactory.apply(index), OS_CONTAINER.getHttpHostAddress()));
        environment.execute("Opensearch Sink Test");
        // Verify every produced document landed in the target index.
        final RestHighLevelClient restClient = OpensearchUtil.createClient(OS_CONTAINER);
        SourceSinkDataTestKit.verifyProducedSinkData(restClient, index);
        restClient.close();
    }
}
| 2,887 |
0 | Create_ds/flink-connector-opensearch/flink-connector-opensearch/src/test/java/org/apache/flink/streaming/connectors/opensearch | Create_ds/flink-connector-opensearch/flink-connector-opensearch/src/test/java/org/apache/flink/streaming/connectors/opensearch/testutils/SourceSinkDataTestKit.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.connectors.opensearch.testutils;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.connectors.opensearch.OpensearchSinkFunction;
import org.apache.flink.streaming.connectors.opensearch.RequestIndexer;
import org.opensearch.action.get.GetRequest;
import org.opensearch.action.get.GetResponse;
import org.opensearch.action.index.IndexRequest;
import org.opensearch.client.RequestOptions;
import org.opensearch.client.RestHighLevelClient;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
import java.io.IOException;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * This class contains utilities and a pre-defined source function and Opensearch sink function
 * used to simulate and verify data used in tests.
 */
public class SourceSinkDataTestKit {
    private static final int NUM_ELEMENTS = 20;
    private static final String DATA_PREFIX = "message #";
    private static final String DATA_FIELD_NAME = "data";

    /** Static utility holder; not meant to be instantiated. */
    private SourceSinkDataTestKit() {}

    /**
     * A {@link SourceFunction} that generates the elements (id, "message #" + id) for ids 0..19
     * ({@code NUM_ELEMENTS} elements in total).
     */
    public static class TestDataSourceFunction implements SourceFunction<Tuple2<Integer, String>> {
        private static final long serialVersionUID = 1L;
        // volatile: cancel() may be invoked from a different thread than run().
        private volatile boolean running = true;

        @Override
        public void run(SourceFunction.SourceContext<Tuple2<Integer, String>> ctx)
                throws Exception {
            for (int i = 0; i < NUM_ELEMENTS && running; i++) {
                ctx.collect(Tuple2.of(i, DATA_PREFIX + i));
            }
        }

        @Override
        public void cancel() {
            running = false;
        }
    }

    /** Returns a sink function that writes JSON-encoded documents into the given index. */
    public static OpensearchSinkFunction<Tuple2<Integer, String>> getJsonSinkFunction(
            String index) {
        return new TestOpensearchSinkFunction(index, XContentFactory::jsonBuilder);
    }

    /** Returns a sink function that writes SMILE-encoded documents into the given index. */
    public static OpensearchSinkFunction<Tuple2<Integer, String>> getSmileSinkFunction(
            String index) {
        return new TestOpensearchSinkFunction(index, XContentFactory::smileBuilder);
    }

    /** Sink function indexing each element under its id with a single {@code data} field. */
    private static class TestOpensearchSinkFunction
            implements OpensearchSinkFunction<Tuple2<Integer, String>> {
        private static final long serialVersionUID = 1L;
        private final String index;
        private final XContentBuilderProvider contentBuilderProvider;

        /**
         * Creates the sink function.
         *
         * @param index name of the target Opensearch index
         * @param contentBuilderProvider supplies the content encoding (JSON, SMILE, ...)
         */
        public TestOpensearchSinkFunction(
                String index, XContentBuilderProvider contentBuilderProvider) {
            this.index = index;
            this.contentBuilderProvider = contentBuilderProvider;
        }

        /** Builds an index request keyed by the element id, carrying the element's payload. */
        public IndexRequest createIndexRequest(Tuple2<Integer, String> element) {
            Map<String, Object> document = new HashMap<>();
            document.put(DATA_FIELD_NAME, element.f1);
            try {
                return new IndexRequest(index)
                        .id(element.f0.toString())
                        .source(contentBuilderProvider.getBuilder().map(document));
            } catch (IOException e) {
                // Serialization failure is a test bug; fail fast with the cause attached.
                throw new RuntimeException(e);
            }
        }

        @Override
        public void process(
                Tuple2<Integer, String> element, RuntimeContext ctx, RequestIndexer indexer) {
            indexer.add(createIndexRequest(element));
        }
    }

    /**
     * Verifies the results in an Opensearch index. The results must first be produced into the
     * index using a {@link TestOpensearchSinkFunction}.
     *
     * @param client the client used to connect to Opensearch (closed by the caller)
     * @param index the index to check
     * @throws IOException if a get request fails
     */
    public static void verifyProducedSinkData(RestHighLevelClient client, String index)
            throws IOException {
        for (int i = 0; i < NUM_ELEMENTS; i++) {
            GetResponse response =
                    client.get(new GetRequest(index, Integer.toString(i)), RequestOptions.DEFAULT);
            assertThat(response.getSource().get(DATA_FIELD_NAME)).isEqualTo(DATA_PREFIX + i);
        }
    }

    /** Serializable supplier of an {@link XContentBuilder}, selecting the wire format. */
    @FunctionalInterface
    private interface XContentBuilderProvider extends Serializable {
        XContentBuilder getBuilder() throws IOException;
    }
}
| 2,888 |
0 | Create_ds/flink-connector-opensearch/flink-connector-opensearch/src/test/java/org/apache/flink | Create_ds/flink-connector-opensearch/flink-connector-opensearch/src/test/java/org/apache/flink/architecture/ProductionCodeArchitectureTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.architecture;
import org.apache.flink.architecture.common.ImportOptions;
import com.tngtech.archunit.core.importer.ImportOption;
import com.tngtech.archunit.junit.AnalyzeClasses;
import com.tngtech.archunit.junit.ArchTest;
import com.tngtech.archunit.junit.ArchTests;
/**
 * Architecture tests for the connector's production code: analyzes {@code
 * org.apache.flink.connector}, excluding tests, archives, Scala, and shaded classes.
 */
@AnalyzeClasses(
        packages = "org.apache.flink.connector",
        importOptions = {
            ImportOption.DoNotIncludeTests.class,
            ImportOption.DoNotIncludeArchives.class,
            ImportOptions.ExcludeScalaImportOption.class,
            ImportOptions.ExcludeShadedImportOption.class
        })
public class ProductionCodeArchitectureTest {
    // Pulls in the common ArchUnit rule set shared by Flink connector production code.
    @ArchTest
    public static final ArchTests COMMON_TESTS = ArchTests.in(ProductionCodeArchitectureBase.class);
}
| 2,889 |
0 | Create_ds/flink-connector-opensearch/flink-connector-opensearch/src/test/java/org/apache/flink | Create_ds/flink-connector-opensearch/flink-connector-opensearch/src/test/java/org/apache/flink/architecture/TestCodeArchitectureTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.architecture;
import org.apache.flink.architecture.common.ImportOptions;
import com.tngtech.archunit.core.importer.ImportOption;
import com.tngtech.archunit.junit.AnalyzeClasses;
import com.tngtech.archunit.junit.ArchTest;
import com.tngtech.archunit.junit.ArchTests;
/**
 * Architecture tests for test code: analyzes the connector's test packages only, excluding Scala
 * and shaded classes.
 */
@AnalyzeClasses(
        packages = {
            "org.apache.flink.connector.opensearch",
            "org.apache.flink.streaming.connectors.opensearch"
        },
        importOptions = {
            ImportOption.OnlyIncludeTests.class,
            ImportOptions.ExcludeScalaImportOption.class,
            ImportOptions.ExcludeShadedImportOption.class
        })
public class TestCodeArchitectureTest {
    // Pulls in the common ArchUnit rule set shared by Flink connector test code.
    @ArchTest
    public static final ArchTests COMMON_TESTS = ArchTests.in(TestCodeArchitectureTestBase.class);
}
| 2,890 |
0 | Create_ds/flink-connector-opensearch/flink-connector-opensearch/src/test/java/org/apache/flink/connector | Create_ds/flink-connector-opensearch/flink-connector-opensearch/src/test/java/org/apache/flink/connector/opensearch/OpensearchUtil.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.connector.opensearch;
import org.apache.flink.annotation.Internal;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.streaming.connectors.opensearch.RestClientFactory;
import org.apache.flink.table.connector.sink.DynamicTableSink;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.conn.ssl.TrustAllStrategy;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.ssl.SSLContexts;
import org.opensearch.client.RestClient;
import org.opensearch.client.RestClientBuilder.HttpClientConfigCallback;
import org.opensearch.client.RestHighLevelClient;
import org.opensearch.testcontainers.OpensearchContainer;
import org.slf4j.Logger;
import org.testcontainers.containers.output.Slf4jLogConsumer;
import org.testcontainers.utility.DockerImageName;
import java.security.KeyManagementException;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.util.Optional;
/** Collection of utility methods for Opensearch tests. */
@Internal
public class OpensearchUtil {
    // Non-instantiable utility class.
    private OpensearchUtil() {}
    /**
     * Creates a preconfigured {@link OpensearchContainer} with limited memory allocation and aligns
     * the internal Opensearch log levels with the ones used by the capturing logger.
     *
     * @param dockerImageVersion describing the Opensearch image
     * @param log to derive the log level from
     * @return configured Opensearch container
     */
    public static OpensearchContainer createOpensearchContainer(
            String dockerImageVersion, Logger log) {
        // Map the SLF4J logger's effective level (checked from finest to coarsest) onto the
        // container's internal Opensearch log level.
        String logLevel;
        if (log.isTraceEnabled()) {
            logLevel = "TRACE";
        } else if (log.isDebugEnabled()) {
            logLevel = "DEBUG";
        } else if (log.isInfoEnabled()) {
            logLevel = "INFO";
        } else if (log.isWarnEnabled()) {
            logLevel = "WARN";
        } else if (log.isErrorEnabled()) {
            logLevel = "ERROR";
        } else {
            logLevel = "OFF";
        }
        return new OpensearchContainer(DockerImageName.parse(dockerImageVersion))
                .withEnv("OPENSEARCH_JAVA_OPTS", "-Xms2g -Xmx2g")
                .withEnv("logger.org.opensearch", logLevel)
                .withLogConsumer(new Slf4jLogConsumer(log));
    }
    /**
     * Creates a preconfigured {@link RestHighLevelClient} instance for specific {@link
     * OpensearchContainer} instance. The caller is responsible for closing the client.
     *
     * @return preconfigured {@link RestHighLevelClient} instance
     */
    public static RestHighLevelClient createClient(OpensearchContainer container) {
        final String username = container.getUsername();
        final String password = container.getPassword();
        return new RestHighLevelClient(
                RestClient.builder(HttpHost.create(container.getHttpHostAddress()))
                        .setHttpClientConfigCallback(
                                createClientConfigCallback(username, password)));
    }
    /**
     * Creates a preconfigured {@link RestClientFactory} instance for specific {@link
     * OpensearchContainer} instance.
     *
     * @return preconfigured {@link RestClientFactory} instance
     */
    public static RestClientFactory createClientFactory(OpensearchContainer container) {
        final String username = container.getUsername();
        final String password = container.getPassword();
        return factory ->
                factory.setHttpClientConfigCallback(createClientConfigCallback(username, password));
    }
    /**
     * Creates a dedicated {@link HttpClientConfigCallback} instance for specific {@link
     * OpensearchContainer} instance. Configures basic-auth credentials and a trust-all SSL
     * context (the test container presents a self-signed certificate — test use only).
     *
     * @return dedicated {@link HttpClientConfigCallback} instance
     */
    private static HttpClientConfigCallback createClientConfigCallback(
            final String username, final String password) {
        return (httpClientBuilder) -> {
            try {
                final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
                credentialsProvider.setCredentials(
                        AuthScope.ANY, new UsernamePasswordCredentials(username, password));
                return httpClientBuilder
                        .setDefaultCredentialsProvider(credentialsProvider)
                        .setSSLContext(
                                SSLContexts.custom()
                                        // Accept any certificate — never use outside tests.
                                        .loadTrustMaterial(new TrustAllStrategy())
                                        .build());
            } catch (final NoSuchAlgorithmException
                    | KeyStoreException
                    | KeyManagementException ex) {
                // SSL context construction failure is unrecoverable for the test setup.
                throw new RuntimeException(ex);
            }
        };
    }
    /** A mock {@link DynamicTableSink.Context} for Opensearch tests. */
    public static class MockContext implements DynamicTableSink.Context {
        @Override
        public boolean isBounded() {
            return false;
        }
        @Override
        public TypeInformation<?> createTypeInformation(DataType consumedDataType) {
            return null;
        }
        @Override
        public TypeInformation<?> createTypeInformation(LogicalType consumedLogicalType) {
            return null;
        }
        @Override
        public DynamicTableSink.DataStructureConverter createDataStructureConverter(
                DataType consumedDataType) {
            return null;
        }
        // NOTE(review): presumably overrides a Context method added in a newer Flink version;
        // no @Override so the class still compiles against older APIs — confirm.
        public Optional<int[][]> getTargetColumns() {
            return Optional.empty();
        }
    }
}
| 2,891 |
0 | Create_ds/flink-connector-opensearch/flink-connector-opensearch/src/test/java/org/apache/flink/connector/opensearch | Create_ds/flink-connector-opensearch/flink-connector-opensearch/src/test/java/org/apache/flink/connector/opensearch/test/DockerImageVersions.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.connector.opensearch.test;
/**
 * Utility class for defining the image names and versions of Docker containers used during the
 * integration tests.
 */
public class DockerImageVersions {
    /** Opensearch 1.x image coordinates used by the integration tests. */
    public static final String OPENSEARCH_1 = "opensearchproject/opensearch:1.3.6";

    /** Non-instantiable constants holder. */
    private DockerImageVersions() {}
}
| 2,892 |
0 | Create_ds/flink-connector-opensearch/flink-connector-opensearch/src/test/java/org/apache/flink/connector/opensearch | Create_ds/flink-connector-opensearch/flink-connector-opensearch/src/test/java/org/apache/flink/connector/opensearch/sink/OpensearchSinkBuilderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.connector.opensearch.sink;
import org.apache.flink.connector.base.DeliveryGuarantee;
import org.apache.flink.util.TestLoggerExtension;
import org.apache.http.HttpHost;
import org.junit.jupiter.api.DynamicTest;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestFactory;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.extension.ExtendWith;
import java.util.stream.Stream;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatNoException;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
/** Tests for {@link OpensearchSinkBuilder}. */
@ExtendWith(TestLoggerExtension.class)
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
class OpensearchSinkBuilderTest {

    /** Every fully configured builder below must produce a sink without throwing. */
    @TestFactory
    Stream<DynamicTest> testValidBuilders() {
        Stream<OpensearchSinkBuilder<Object>> builders =
                Stream.of(
                        createMinimalBuilder(),
                        createMinimalBuilder()
                                .setDeliveryGuarantee(DeliveryGuarantee.AT_LEAST_ONCE),
                        createMinimalBuilder()
                                .setBulkFlushBackoffStrategy(FlushBackoffType.CONSTANT, 1, 1),
                        createMinimalBuilder()
                                .setConnectionUsername("username")
                                .setConnectionPassword("password"));
        return DynamicTest.stream(
                builders,
                OpensearchSinkBuilder::toString,
                builder -> assertThatNoException().isThrownBy(builder::build));
    }

    /** Without an explicit choice, the built sink defaults to at-least-once delivery. */
    @Test
    void testDefaultDeliveryGuarantee() {
        OpensearchSink<Object> sink = createMinimalBuilder().build();
        assertThat(sink.getDeliveryGuarantee()).isEqualTo(DeliveryGuarantee.AT_LEAST_ONCE);
    }

    /** EXACTLY_ONCE is unsupported and must be rejected eagerly by the setter itself. */
    @Test
    void testThrowIfExactlyOnceConfigured() {
        OpensearchSinkBuilder<Object> builder = createMinimalBuilder();
        assertThatThrownBy(() -> builder.setDeliveryGuarantee(DeliveryGuarantee.EXACTLY_ONCE))
                .isInstanceOf(IllegalStateException.class);
    }

    /** Building without hosts fails even when an emitter is configured. */
    @Test
    void testThrowIfHostsNotSet() {
        OpensearchSinkBuilder<Object> builder =
                createEmptyBuilder().setEmitter((element, indexer, context) -> {});
        assertThatThrownBy(builder::build).isInstanceOf(NullPointerException.class);
    }

    /** Building without an emitter fails even when hosts are configured. */
    @Test
    void testThrowIfEmitterNotSet() {
        OpensearchSinkBuilder<Object> builder =
                createEmptyBuilder().setHosts(new HttpHost("localhost:3000"));
        assertThatThrownBy(builder::build).isInstanceOf(NullPointerException.class);
    }

    /** Negative timeouts are invalid; the three checks are independent of each other. */
    @Test
    void testThrowIfSetInvalidTimeouts() {
        assertThatThrownBy(() -> createEmptyBuilder().setSocketTimeout(-1).build())
                .isInstanceOf(IllegalStateException.class);
        assertThatThrownBy(() -> createEmptyBuilder().setConnectionTimeout(-1).build())
                .isInstanceOf(IllegalStateException.class);
        assertThatThrownBy(() -> createEmptyBuilder().setConnectionRequestTimeout(-1).build())
                .isInstanceOf(IllegalStateException.class);
    }

    /** A null REST client factory is rejected. */
    @Test
    void testThrowIfRestClientFactoryNotSet() {
        assertThatThrownBy(() -> createEmptyBuilder().setRestClientFactory(null).build())
                .isInstanceOf(NullPointerException.class);
    }

    /** A null connection path prefix is rejected. */
    @Test
    void testThrowIfConnectionPathPrefixNotSet() {
        assertThatThrownBy(() -> createEmptyBuilder().setConnectionPathPrefix(null).build())
                .isInstanceOf(NullPointerException.class);
    }

    /** Builder with nothing configured at all. */
    private OpensearchSinkBuilder<Object> createEmptyBuilder() {
        return new OpensearchSinkBuilder<>();
    }

    /** Builder with the minimal required configuration: an emitter plus one host. */
    private OpensearchSinkBuilder<Object> createMinimalBuilder() {
        OpensearchSinkBuilder<Object> builder = new OpensearchSinkBuilder<>();
        return builder.setEmitter((element, indexer, context) -> {})
                .setHosts(new HttpHost("localhost:3000"));
    }
}
| 2,893 |
0 | Create_ds/flink-connector-opensearch/flink-connector-opensearch/src/test/java/org/apache/flink/connector/opensearch | Create_ds/flink-connector-opensearch/flink-connector-opensearch/src/test/java/org/apache/flink/connector/opensearch/sink/OpensearchWriterITCase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.connector.opensearch.sink;
import org.apache.flink.api.common.operators.MailboxExecutor;
import org.apache.flink.api.connector.sink2.SinkWriter;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.connector.opensearch.OpensearchUtil;
import org.apache.flink.connector.opensearch.test.DockerImageVersions;
import org.apache.flink.metrics.Counter;
import org.apache.flink.metrics.Gauge;
import org.apache.flink.metrics.groups.OperatorIOMetricGroup;
import org.apache.flink.metrics.groups.SinkWriterMetricGroup;
import org.apache.flink.metrics.testutils.MetricListener;
import org.apache.flink.runtime.metrics.MetricNames;
import org.apache.flink.runtime.metrics.groups.InternalSinkWriterMetricGroup;
import org.apache.flink.runtime.metrics.groups.UnregisteredMetricGroups;
import org.apache.flink.util.FlinkRuntimeException;
import org.apache.flink.util.TestLoggerExtension;
import org.apache.flink.util.function.ThrowingRunnable;
import org.apache.http.HttpHost;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.opensearch.action.delete.DeleteRequest;
import org.opensearch.action.index.IndexRequest;
import org.opensearch.action.update.UpdateRequest;
import org.opensearch.client.RestHighLevelClient;
import org.opensearch.testcontainers.OpensearchContainer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testcontainers.junit.jupiter.Container;
import org.testcontainers.junit.jupiter.Testcontainers;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import static org.apache.flink.connector.opensearch.sink.OpensearchTestClient.buildMessage;
import static org.apache.flink.connector.opensearch.sink.OpensearchWriter.DEFAULT_FAILURE_HANDLER;
import static org.assertj.core.api.Assertions.assertThat;
/** Tests for {@link OpensearchWriter}. */
@Testcontainers
@ExtendWith(TestLoggerExtension.class)
class OpensearchWriterITCase {
private static final Logger LOG = LoggerFactory.getLogger(OpensearchWriterITCase.class);
@Container
private static final OpensearchContainer OS_CONTAINER =
OpensearchUtil.createOpensearchContainer(DockerImageVersions.OPENSEARCH_1, LOG);
private RestHighLevelClient client;
private OpensearchTestClient context;
private MetricListener metricListener;
@BeforeEach
void setUp() {
metricListener = new MetricListener();
client = OpensearchUtil.createClient(OS_CONTAINER);
context = new OpensearchTestClient(client);
}
@AfterEach
void tearDown() throws IOException {
if (client != null) {
client.close();
}
}
@Test
void testWriteOnBulkFlush() throws Exception {
final String index = "test-bulk-flush-without-checkpoint";
final int flushAfterNActions = 5;
final BulkProcessorConfig bulkProcessorConfig =
new BulkProcessorConfig(flushAfterNActions, -1, -1, FlushBackoffType.NONE, 0, 0);
try (final OpensearchWriter<Tuple2<Integer, String>> writer =
createWriter(index, false, bulkProcessorConfig)) {
writer.write(Tuple2.of(1, buildMessage(1)), null);
writer.write(Tuple2.of(2, buildMessage(2)), null);
writer.write(Tuple2.of(3, buildMessage(3)), null);
writer.write(Tuple2.of(4, buildMessage(4)), null);
// Ignore flush on checkpoint
writer.flush(false);
context.assertThatIdsAreNotWritten(index, 1, 2, 3, 4);
// Trigger flush
writer.write(Tuple2.of(5, "test-5"), null);
context.assertThatIdsAreWritten(index, 1, 2, 3, 4, 5);
writer.write(Tuple2.of(6, "test-6"), null);
context.assertThatIdsAreNotWritten(index, 6);
// Force flush
writer.blockingFlushAllActions();
context.assertThatIdsAreWritten(index, 1, 2, 3, 4, 5, 6);
}
}
@Test
void testWriteOnBulkIntervalFlush() throws Exception {
final String index = "test-bulk-flush-with-interval";
// Configure bulk processor to flush every 1s;
final BulkProcessorConfig bulkProcessorConfig =
new BulkProcessorConfig(-1, -1, 1000, FlushBackoffType.NONE, 0, 0);
try (final OpensearchWriter<Tuple2<Integer, String>> writer =
createWriter(index, false, bulkProcessorConfig)) {
writer.write(Tuple2.of(1, buildMessage(1)), null);
writer.write(Tuple2.of(2, buildMessage(2)), null);
writer.write(Tuple2.of(3, buildMessage(3)), null);
writer.write(Tuple2.of(4, buildMessage(4)), null);
writer.blockingFlushAllActions();
}
context.assertThatIdsAreWritten(index, 1, 2, 3, 4);
}
@Test
void testWriteOnCheckpoint() throws Exception {
final String index = "test-bulk-flush-with-checkpoint";
final BulkProcessorConfig bulkProcessorConfig =
new BulkProcessorConfig(-1, -1, -1, FlushBackoffType.NONE, 0, 0);
// Enable flush on checkpoint
try (final OpensearchWriter<Tuple2<Integer, String>> writer =
createWriter(index, true, bulkProcessorConfig)) {
writer.write(Tuple2.of(1, buildMessage(1)), null);
writer.write(Tuple2.of(2, buildMessage(2)), null);
writer.write(Tuple2.of(3, buildMessage(3)), null);
context.assertThatIdsAreNotWritten(index, 1, 2, 3);
// Trigger flush
writer.flush(false);
context.assertThatIdsAreWritten(index, 1, 2, 3);
}
}
@Test
void testIncrementByteOutMetric() throws Exception {
final String index = "test-inc-byte-out";
final OperatorIOMetricGroup operatorIOMetricGroup =
UnregisteredMetricGroups.createUnregisteredOperatorMetricGroup().getIOMetricGroup();
final InternalSinkWriterMetricGroup metricGroup =
InternalSinkWriterMetricGroup.mock(
metricListener.getMetricGroup(), operatorIOMetricGroup);
final int flushAfterNActions = 2;
final BulkProcessorConfig bulkProcessorConfig =
new BulkProcessorConfig(flushAfterNActions, -1, -1, FlushBackoffType.NONE, 0, 0);
try (final OpensearchWriter<Tuple2<Integer, String>> writer =
createWriter(index, false, bulkProcessorConfig, metricGroup)) {
final Counter numBytesOut = operatorIOMetricGroup.getNumBytesOutCounter();
assertThat(numBytesOut.getCount()).isEqualTo(0);
writer.write(Tuple2.of(1, buildMessage(1)), null);
writer.write(Tuple2.of(2, buildMessage(2)), null);
writer.blockingFlushAllActions();
long first = numBytesOut.getCount();
assertThat(first).isGreaterThan(0);
writer.write(Tuple2.of(1, buildMessage(1)), null);
writer.write(Tuple2.of(2, buildMessage(2)), null);
writer.blockingFlushAllActions();
assertThat(numBytesOut.getCount()).isGreaterThan(first);
}
}
@Test
void testIncrementRecordsSendMetric() throws Exception {
final String index = "test-inc-records-send";
final int flushAfterNActions = 2;
final BulkProcessorConfig bulkProcessorConfig =
new BulkProcessorConfig(flushAfterNActions, -1, -1, FlushBackoffType.NONE, 0, 0);
try (final OpensearchWriter<Tuple2<Integer, String>> writer =
createWriter(index, false, bulkProcessorConfig)) {
final Optional<Counter> recordsSend =
metricListener.getCounter(MetricNames.NUM_RECORDS_SEND);
writer.write(Tuple2.of(1, buildMessage(1)), null);
// Update existing index
writer.write(Tuple2.of(1, "u" + buildMessage(2)), null);
// Delete index
writer.write(Tuple2.of(1, "d" + buildMessage(3)), null);
writer.blockingFlushAllActions();
assertThat(recordsSend).isPresent();
assertThat(recordsSend.get().getCount()).isEqualTo(3L);
}
}
@Test
void testCurrentSendTime() throws Exception {
final String index = "test-current-send-time";
final int flushAfterNActions = 2;
final BulkProcessorConfig bulkProcessorConfig =
new BulkProcessorConfig(flushAfterNActions, -1, -1, FlushBackoffType.NONE, 0, 0);
try (final OpensearchWriter<Tuple2<Integer, String>> writer =
createWriter(index, false, bulkProcessorConfig)) {
final Optional<Gauge<Long>> currentSendTime =
metricListener.getGauge("currentSendTime");
writer.write(Tuple2.of(1, buildMessage(1)), null);
writer.write(Tuple2.of(2, buildMessage(2)), null);
writer.blockingFlushAllActions();
assertThat(currentSendTime).isPresent();
assertThat(currentSendTime.get().getValue()).isGreaterThan(0L);
}
}
private static class TestHandler implements FailureHandler {
private boolean failed = false;
private synchronized void setFailed() {
failed = true;
}
public boolean isFailed() {
return failed;
}
@Override
public void onFailure(Throwable failure) {
setFailed();
}
}
@Test
void testWriteErrorOnUpdate() throws Exception {
final String index = "test-bulk-flush-with-error";
final int flushAfterNActions = 1;
final BulkProcessorConfig bulkProcessorConfig =
new BulkProcessorConfig(flushAfterNActions, -1, -1, FlushBackoffType.NONE, 0, 0);
final TestHandler testHandler = new TestHandler();
try (final OpensearchWriter<Tuple2<Integer, String>> writer =
createWriter(index, true, bulkProcessorConfig, testHandler)) {
// Trigger an error by updating non-existing document
writer.write(Tuple2.of(1, "u" + buildMessage(1)), null);
context.assertThatIdsAreNotWritten(index, 1);
assertThat(testHandler.isFailed()).isEqualTo(true);
}
}
private OpensearchWriter<Tuple2<Integer, String>> createWriter(
String index, boolean flushOnCheckpoint, BulkProcessorConfig bulkProcessorConfig) {
return createWriter(
index,
flushOnCheckpoint,
bulkProcessorConfig,
InternalSinkWriterMetricGroup.mock(metricListener.getMetricGroup()),
DEFAULT_FAILURE_HANDLER);
}
private OpensearchWriter<Tuple2<Integer, String>> createWriter(
String index,
boolean flushOnCheckpoint,
BulkProcessorConfig bulkProcessorConfig,
FailureHandler failureHandler) {
return createWriter(
index,
flushOnCheckpoint,
bulkProcessorConfig,
InternalSinkWriterMetricGroup.mock(metricListener.getMetricGroup()),
failureHandler);
}
private OpensearchWriter<Tuple2<Integer, String>> createWriter(
String index,
boolean flushOnCheckpoint,
BulkProcessorConfig bulkProcessorConfig,
SinkWriterMetricGroup metricGroup) {
return createWriter(
index,
flushOnCheckpoint,
bulkProcessorConfig,
metricGroup,
DEFAULT_FAILURE_HANDLER);
}
private OpensearchWriter<Tuple2<Integer, String>> createWriter(
String index,
boolean flushOnCheckpoint,
BulkProcessorConfig bulkProcessorConfig,
SinkWriterMetricGroup metricGroup,
FailureHandler failureHandler) {
return new OpensearchWriter<Tuple2<Integer, String>>(
Collections.singletonList(HttpHost.create(OS_CONTAINER.getHttpHostAddress())),
new UpdatingEmitter(index, context.getDataFieldName()),
flushOnCheckpoint,
bulkProcessorConfig,
new NetworkClientConfig(
OS_CONTAINER.getUsername(),
OS_CONTAINER.getPassword(),
null,
null,
null,
null,
true),
metricGroup,
new TestMailbox(),
new DefaultRestClientFactory(),
failureHandler);
}
private static class UpdatingEmitter implements OpensearchEmitter<Tuple2<Integer, String>> {
private static final long serialVersionUID = 1L;
private final String dataFieldName;
private final String index;
UpdatingEmitter(String index, String dataFieldName) {
this.index = index;
this.dataFieldName = dataFieldName;
}
@Override
public void emit(
Tuple2<Integer, String> element,
SinkWriter.Context context,
RequestIndexer indexer) {
Map<String, Object> document = new HashMap<>();
document.put(dataFieldName, element.f1);
final char action = element.f1.charAt(0);
final String id = element.f0.toString();
switch (action) {
case 'd':
{
indexer.add(new DeleteRequest(index).id(id));
break;
}
case 'u':
{
indexer.add(new UpdateRequest().index(index).id(id).doc(document));
break;
}
default:
{
indexer.add(new IndexRequest(index).id(id).source(document));
}
}
}
}
private static class TestMailbox implements MailboxExecutor {
@Override
public void execute(
ThrowingRunnable<? extends Exception> command,
String descriptionFormat,
Object... descriptionArgs) {
try {
command.run();
} catch (Exception e) {
throw new RuntimeException("Unexpected error", e);
}
}
@Override
public void yield() throws InterruptedException, FlinkRuntimeException {
Thread.sleep(100);
}
@Override
public boolean tryYield() throws FlinkRuntimeException {
return false;
}
}
}
| 2,894 |
0 | Create_ds/flink-connector-opensearch/flink-connector-opensearch/src/test/java/org/apache/flink/connector/opensearch | Create_ds/flink-connector-opensearch/flink-connector-opensearch/src/test/java/org/apache/flink/connector/opensearch/sink/OpensearchTestClient.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.connector.opensearch.sink;
import org.opensearch.OpenSearchStatusException;
import org.opensearch.action.get.GetRequest;
import org.opensearch.action.get.GetResponse;
import org.opensearch.client.RequestOptions;
import org.opensearch.client.RestHighLevelClient;
import java.io.IOException;
import static org.assertj.core.api.Assertions.assertThat;
/** Lightweight REST wrapper used by the tests to read back documents written by the sink. */
class OpensearchTestClient {
    private static final String DATA_FIELD_NAME = "data";

    private final RestHighLevelClient client;

    OpensearchTestClient(RestHighLevelClient client) {
        this.client = client;
    }

    GetResponse getResponse(String index, int id) throws IOException {
        return client.get(new GetRequest(index, Integer.toString(id)), RequestOptions.DEFAULT);
    }

    /**
     * Asserts that none of the given ids exist in {@code index}. A 404 from the cluster (e.g.
     * the index itself does not exist yet) also counts as "not written".
     */
    void assertThatIdsAreNotWritten(String index, int... ids) throws IOException {
        for (final int id : ids) {
            try {
                final GetResponse response = getResponse(index, id);
                // BUGFIX: AssertJ's .as() must be called BEFORE the assertion method;
                // placed after .isFalse() the description was silently ignored.
                assertThat(response.isExists())
                        .as(String.format("Id %s is unexpectedly present.", id))
                        .isFalse();
            } catch (OpenSearchStatusException e) {
                assertThat(e.status().getStatus()).isEqualTo(404);
            }
        }
    }

    /**
     * Polls until every id becomes visible in {@code index} and asserts the stored payload
     * matches {@link #buildMessage(int)}. Relies on the surrounding test timeout to abort if a
     * document never appears.
     */
    void assertThatIdsAreWritten(String index, int... ids)
            throws IOException, InterruptedException {
        for (final int id : ids) {
            GetResponse response;
            do {
                response = getResponse(index, id);
                Thread.sleep(10);
            } while (response.isSourceEmpty());
            assertThat(response.getSource().get(DATA_FIELD_NAME)).isEqualTo(buildMessage(id));
        }
    }

    /** Name of the single document field the test emitters write. */
    String getDataFieldName() {
        return DATA_FIELD_NAME;
    }

    /** Deterministic payload for a given id, shared by writers and assertions. */
    static String buildMessage(int id) {
        return "test-" + id;
    }
}
| 2,895 |
0 | Create_ds/flink-connector-opensearch/flink-connector-opensearch/src/test/java/org/apache/flink/connector/opensearch | Create_ds/flink-connector-opensearch/flink-connector-opensearch/src/test/java/org/apache/flink/connector/opensearch/sink/TestEmitter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.connector.opensearch.sink;
import org.apache.flink.api.connector.sink2.SinkWriter;
import org.apache.flink.api.java.tuple.Tuple2;
import org.opensearch.action.index.IndexRequest;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
import java.io.IOException;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
/** Test emitter that indexes {@code Tuple2} records as single-field documents. */
class TestEmitter implements OpensearchEmitter<Tuple2<Integer, String>> {
    private final String index;
    private final XContentBuilderProvider xContentBuilderProvider;
    private final String dataFieldName;

    /** Creates an emitter that serializes documents as JSON. */
    public static TestEmitter jsonEmitter(String index, String dataFieldName) {
        return new TestEmitter(index, dataFieldName, XContentFactory::jsonBuilder);
    }

    /** Creates an emitter that serializes documents in the SMILE binary format. */
    public static TestEmitter smileEmitter(String index, String dataFieldName) {
        return new TestEmitter(index, dataFieldName, XContentFactory::smileBuilder);
    }

    private TestEmitter(
            String index, String dataFieldName, XContentBuilderProvider xContentBuilderProvider) {
        this.index = index;
        this.dataFieldName = dataFieldName;
        this.xContentBuilderProvider = xContentBuilderProvider;
    }

    @Override
    public void emit(
            Tuple2<Integer, String> element, SinkWriter.Context context, RequestIndexer indexer) {
        indexer.add(createIndexRequest(element));
    }

    /** Builds an index request keyed by {@code element.f0}, carrying {@code element.f1}. */
    public IndexRequest createIndexRequest(Tuple2<Integer, String> element) {
        final String id = element.f0.toString();
        final Map<String, Object> payload = new HashMap<>();
        payload.put(dataFieldName, element.f1);
        try {
            final XContentBuilder source = xContentBuilderProvider.getBuilder().map(payload);
            return new IndexRequest(index).id(id).source(source);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /** Serializable supplier of the content builder (JSON or SMILE). */
    @FunctionalInterface
    private interface XContentBuilderProvider extends Serializable {
        XContentBuilder getBuilder() throws IOException;
    }
}
| 2,896 |
0 | Create_ds/flink-connector-opensearch/flink-connector-opensearch/src/test/java/org/apache/flink/connector/opensearch | Create_ds/flink-connector-opensearch/flink-connector-opensearch/src/test/java/org/apache/flink/connector/opensearch/sink/OpensearchSinkITCase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.connector.opensearch.sink;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.state.CheckpointListener;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.connector.base.DeliveryGuarantee;
import org.apache.flink.connector.opensearch.OpensearchUtil;
import org.apache.flink.connector.opensearch.test.DockerImageVersions;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.LocalStreamEnvironment;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.TestLoggerExtension;
import org.apache.http.HttpHost;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.EnumSource;
import org.junit.jupiter.params.provider.MethodSource;
import org.opensearch.client.RestHighLevelClient;
import org.opensearch.testcontainers.OpensearchContainer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testcontainers.junit.jupiter.Container;
import org.testcontainers.junit.jupiter.Testcontainers;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import java.util.function.BiFunction;
import static org.assertj.core.api.Assertions.assertThat;
/** Tests for {@link OpensearchSink}. */
@Testcontainers
@ExtendWith(TestLoggerExtension.class)
class OpensearchSinkITCase {
    protected static final Logger LOG = LoggerFactory.getLogger(OpensearchSinkITCase.class);

    // Set by FailingMapper.notifyCheckpointComplete; static so the flag is shared
    // between the test thread and the task instance. NOTE(review): this presumably
    // relies on the job running in the same JVM (LocalStreamEnvironment below).
    private static boolean failed;

    private RestHighLevelClient client;
    private OpensearchTestClient context;

    /** Shared Opensearch container, started once for the whole test class. */
    @Container
    private static final OpensearchContainer OS_CONTAINER =
            OpensearchUtil.createOpensearchContainer(DockerImageVersions.OPENSEARCH_1, LOG);

    @BeforeEach
    void setUp() {
        // Reset the shared failure flag so tests don't leak state into each other.
        failed = false;
        client = OpensearchUtil.createClient(OS_CONTAINER);
        context = new OpensearchTestClient(client);
    }

    @AfterEach
    void tearDown() throws IOException {
        if (client != null) {
            client.close();
        }
    }

    // EXACTLY_ONCE must be rejected with an IllegalStateException (the builder's
    // setter throws, see OpensearchSinkBuilderTest); all other guarantees must
    // successfully write ids 1..5.
    @ParameterizedTest
    @EnumSource(DeliveryGuarantee.class)
    void testWriteToOpensearchWithDeliveryGuarantee(DeliveryGuarantee deliveryGuarantee)
            throws Exception {
        final String index = "test-opensearch-with-delivery-" + deliveryGuarantee;
        boolean failure = false;
        try {
            runTest(index, false, TestEmitter::jsonEmitter, deliveryGuarantee, null);
        } catch (IllegalStateException e) {
            failure = true;
            assertThat(deliveryGuarantee).isSameAs(DeliveryGuarantee.EXACTLY_ONCE);
        } finally {
            // The exception must occur exactly for EXACTLY_ONCE and for nothing else.
            assertThat(failure).isEqualTo(deliveryGuarantee == DeliveryGuarantee.EXACTLY_ONCE);
        }
    }

    // Runs the pipeline once per serialization format (JSON and SMILE).
    @ParameterizedTest
    @MethodSource("opensearchEmitters")
    void testWriteJsonToOpensearch(
            BiFunction<String, String, OpensearchEmitter<Tuple2<Integer, String>>> emitterProvider)
            throws Exception {
        final String index = "test-opensearch-sink-" + UUID.randomUUID();
        runTest(index, false, emitterProvider, null);
    }

    // FailingMapper throws after the first completed checkpoint; with restarts
    // allowed the job must recover and still deliver all records at-least-once.
    @Test
    void testRecovery() throws Exception {
        final String index = "test-recovery-opensearch-sink";
        runTest(index, true, TestEmitter::jsonEmitter, new FailingMapper());
        assertThat(failed).isTrue();
    }

    // Convenience overload defaulting to AT_LEAST_ONCE delivery.
    private void runTest(
            String index,
            boolean allowRestarts,
            BiFunction<String, String, OpensearchEmitter<Tuple2<Integer, String>>> emitterProvider,
            @Nullable MapFunction<Long, Long> additionalMapper)
            throws Exception {
        runTest(
                index,
                allowRestarts,
                emitterProvider,
                DeliveryGuarantee.AT_LEAST_ONCE,
                additionalMapper);
    }

    // Builds a 5-element pipeline (source -> [additionalMapper] -> tuple map -> sink),
    // executes it against the shared container and verifies ids 1..5 were written.
    private void runTest(
            String index,
            boolean allowRestarts,
            BiFunction<String, String, OpensearchEmitter<Tuple2<Integer, String>>> emitterProvider,
            DeliveryGuarantee deliveryGuarantee,
            @Nullable MapFunction<Long, Long> additionalMapper)
            throws Exception {
        final OpensearchSink<Tuple2<Integer, String>> sink =
                new OpensearchSinkBuilder<>()
                        .setHosts(HttpHost.create(OS_CONTAINER.getHttpHostAddress()))
                        .setEmitter(emitterProvider.apply(index, context.getDataFieldName()))
                        .setBulkFlushMaxActions(5)
                        .setConnectionUsername(OS_CONTAINER.getUsername())
                        .setConnectionPassword(OS_CONTAINER.getPassword())
                        .setDeliveryGuarantee(deliveryGuarantee)
                        .setAllowInsecure(true)
                        .build();

        final StreamExecutionEnvironment env = new LocalStreamEnvironment();
        // Frequent checkpoints so FailingMapper (when present) gets a completion callback.
        env.enableCheckpointing(100L);

        if (!allowRestarts) {
            env.setRestartStrategy(RestartStrategies.noRestart());
        }
        DataStream<Long> stream = env.fromSequence(1, 5);

        if (additionalMapper != null) {
            stream = stream.map(additionalMapper);
        }

        stream.map(
                        new MapFunction<Long, Tuple2<Integer, String>>() {
                            @Override
                            public Tuple2<Integer, String> map(Long value) throws Exception {
                                return Tuple2.of(
                                        value.intValue(),
                                        OpensearchTestClient.buildMessage(value.intValue()));
                            }
                        })
                .sinkTo(sink);
        env.execute();
        context.assertThatIdsAreWritten(index, 1, 2, 3, 4, 5);
    }

    // Parameter source for testWriteJsonToOpensearch: one emitter per content format.
    private static List<BiFunction<String, String, OpensearchEmitter<Tuple2<Integer, String>>>>
            opensearchEmitters() {
        return Arrays.asList(TestEmitter::jsonEmitter, TestEmitter::smileEmitter);
    }

    // Mapper that fails the job exactly once: on the first completed checkpoint after
    // it has emitted at least one record. The static `failed` flag prevents repeats.
    private static class FailingMapper implements MapFunction<Long, Long>, CheckpointListener {

        private int emittedRecords = 0;

        @Override
        public Long map(Long value) throws Exception {
            // Slow the stream down so a checkpoint can complete mid-flight.
            Thread.sleep(50);
            emittedRecords++;
            return value;
        }

        @Override
        public void notifyCheckpointComplete(long checkpointId) throws Exception {
            if (failed || emittedRecords == 0) {
                return;
            }
            failed = true;
            throw new Exception("Expected failure");
        }
    }
}
| 2,897 |
0 | Create_ds/flink-connector-opensearch/flink-connector-opensearch/src/test/java/org/apache/flink/connector/opensearch | Create_ds/flink-connector-opensearch/flink-connector-opensearch/src/test/java/org/apache/flink/connector/opensearch/table/TestContext.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.connector.opensearch.table;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.catalog.CatalogTable;
import org.apache.flink.table.catalog.Column;
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.catalog.ResolvedCatalogTable;
import org.apache.flink.table.catalog.ResolvedSchema;
import org.apache.flink.table.factories.DynamicTableFactory;
import org.apache.flink.table.factories.FactoryUtil;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
/**
 * A utility class for mocking {@link DynamicTableFactory.Context} in table-factory tests.
 *
 * <p>Usage follows a builder pattern: {@code context().withSchema(...).withOption(k, v).build()}.
 * Defaults to a single physical TIME column {@code a} and no connector options.
 */
class TestContext {
    private ResolvedSchema schema = ResolvedSchema.of(Column.physical("a", DataTypes.TIME()));
    private final Map<String, String> options = new HashMap<>();

    /**
     * Creates a fresh mock-context builder.
     *
     * @return a new {@code TestContext} with default schema and empty options
     */
    public static TestContext context() {
        return new TestContext();
    }

    /**
     * Replaces the resolved schema used for the mocked table.
     *
     * @param schema the schema of the mocked table
     * @return this builder, for chaining
     */
    public TestContext withSchema(ResolvedSchema schema) {
        this.schema = schema;
        return this;
    }

    /**
     * Adds a connector option to the mocked table.
     *
     * @param key option key
     * @param value option value
     * @return this builder, for chaining
     */
    public TestContext withOption(String key, String value) {
        options.put(key, value);
        return this;
    }

    /**
     * Builds the mocked {@link DynamicTableFactory.Context} from the configured schema and
     * options. The table is registered under the identifier {@code default.default.t1} and is
     * marked as non-temporary (last constructor argument {@code false}).
     *
     * @return the mocked factory context
     */
    public DynamicTableFactory.Context build() {
        return new FactoryUtil.DefaultDynamicTableContext(
                ObjectIdentifier.of("default", "default", "t1"),
                new ResolvedCatalogTable(
                        CatalogTable.of(
                                Schema.newBuilder().fromResolvedSchema(schema).build(),
                                "mock context",
                                Collections.emptyList(),
                                options),
                        schema),
                Collections.emptyMap(),
                new Configuration(),
                TestContext.class.getClassLoader(),
                false);
    }
}
| 2,898 |
0 | Create_ds/flink-connector-opensearch/flink-connector-opensearch/src/test/java/org/apache/flink/connector/opensearch | Create_ds/flink-connector-opensearch/flink-connector-opensearch/src/test/java/org/apache/flink/connector/opensearch/table/KeyExtractorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.connector.opensearch.table;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.TimestampData;
import org.junit.jupiter.api.Test;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.util.Collections;
import java.util.List;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.assertj.core.api.Assertions.assertThat;
/** Tests for {@link KeyExtractor}. */
class KeyExtractorTest {
@Test
public void testSimpleKey() {
List<LogicalTypeWithIndex> logicalTypesWithIndex =
Stream.of(
new LogicalTypeWithIndex(
0, DataTypes.BIGINT().notNull().getLogicalType()))
.collect(Collectors.toList());
Function<RowData, String> keyExtractor =
KeyExtractor.createKeyExtractor(logicalTypesWithIndex, "_");
String key = keyExtractor.apply(GenericRowData.of(12L, StringData.fromString("ABCD")));
assertThat(key).isEqualTo("12");
}
@Test
public void testNoPrimaryKey() {
List<LogicalTypeWithIndex> logicalTypesWithIndex = Collections.emptyList();
Function<RowData, String> keyExtractor =
KeyExtractor.createKeyExtractor(logicalTypesWithIndex, "_");
String key = keyExtractor.apply(GenericRowData.of(12L, StringData.fromString("ABCD")));
assertThat(key).isNull();
}
@Test
public void testTwoFieldsKey() {
List<LogicalTypeWithIndex> logicalTypesWithIndex =
Stream.of(
new LogicalTypeWithIndex(
0, DataTypes.BIGINT().notNull().getLogicalType()),
new LogicalTypeWithIndex(
2, DataTypes.TIMESTAMP().notNull().getLogicalType()))
.collect(Collectors.toList());
Function<RowData, String> keyExtractor =
KeyExtractor.createKeyExtractor(logicalTypesWithIndex, "_");
String key =
keyExtractor.apply(
GenericRowData.of(
12L,
StringData.fromString("ABCD"),
TimestampData.fromLocalDateTime(
LocalDateTime.parse("2012-12-12T12:12:12"))));
assertThat(key).isEqualTo("12_2012-12-12T12:12:12");
}
@Test
public void testAllTypesKey() {
List<LogicalTypeWithIndex> logicalTypesWithIndex =
Stream.of(
new LogicalTypeWithIndex(
0, DataTypes.TINYINT().notNull().getLogicalType()),
new LogicalTypeWithIndex(
1, DataTypes.SMALLINT().notNull().getLogicalType()),
new LogicalTypeWithIndex(
2, DataTypes.INT().notNull().getLogicalType()),
new LogicalTypeWithIndex(
3, DataTypes.BIGINT().notNull().getLogicalType()),
new LogicalTypeWithIndex(
4, DataTypes.BOOLEAN().notNull().getLogicalType()),
new LogicalTypeWithIndex(
5, DataTypes.FLOAT().notNull().getLogicalType()),
new LogicalTypeWithIndex(
6, DataTypes.DOUBLE().notNull().getLogicalType()),
new LogicalTypeWithIndex(
7, DataTypes.STRING().notNull().getLogicalType()),
new LogicalTypeWithIndex(
8, DataTypes.TIMESTAMP().notNull().getLogicalType()),
new LogicalTypeWithIndex(
9,
DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE()
.notNull()
.getLogicalType()),
new LogicalTypeWithIndex(
10, DataTypes.TIME().notNull().getLogicalType()),
new LogicalTypeWithIndex(
11, DataTypes.DATE().notNull().getLogicalType()))
.collect(Collectors.toList());
Function<RowData, String> keyExtractor =
KeyExtractor.createKeyExtractor(logicalTypesWithIndex, "_");
String key =
keyExtractor.apply(
GenericRowData.of(
(byte) 1,
(short) 2,
3,
(long) 4,
true,
1.0f,
2.0d,
StringData.fromString("ABCD"),
TimestampData.fromLocalDateTime(
LocalDateTime.parse("2012-12-12T12:12:12")),
TimestampData.fromInstant(Instant.parse("2013-01-13T13:13:13Z")),
(int) (LocalTime.parse("14:14:14").toNanoOfDay() / 1_000_000),
(int) LocalDate.parse("2015-05-15").toEpochDay()));
assertThat(key)
.isEqualTo(
"1_2_3_4_true_1.0_2.0_ABCD_2012-12-12T12:12:12_2013-01-13T13:13:13_14:14:14_2015-05-15");
}
}
| 2,899 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.