hexsha
stringlengths 40
40
| size
int64 8
1.04M
| content
stringlengths 8
1.04M
| avg_line_length
float64 2.24
100
| max_line_length
int64 4
1k
| alphanum_fraction
float64 0.25
0.97
|
|---|---|---|---|---|---|
ab953e248ec48269ba7cabecdd0b4617aa3a782b
| 1,430
|
package br.com.randomthings.dao;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import javax.transaction.Transactional;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import br.com.randomthings.domain.DomainEntity;
import br.com.randomthings.services.IService;
@Service
@Transactional
public class GenericServiceDao<E extends DomainEntity> implements IDao<E> {

    /**
     * Every {@link IService} bean in the context, keyed by its Spring bean name
     * (by convention {@code "<entitySimpleName>ServiceImpl"}).
     */
    @Autowired
    private Map<String, IService> services;

    /** Persists the entity through the service that matches its concrete type. */
    @Override
    @SuppressWarnings("unchecked")
    public E save(E entity) {
        return (E) searchRepository(entity).save(entity);
    }

    /**
     * Finds entities of the given type: by id (restricted to active status) when the
     * id is set, otherwise all active rows.
     */
    @Override
    @SuppressWarnings("unchecked")
    public List<E> find(E entity) {
        if (null != entity.getId()) {
            return (List<E>) Arrays.asList(searchRepository(entity).findByIdAndStatusTrue(entity.getId()));
        }
        return searchRepository(entity).findAllByStatusTrue();
    }

    /** Updates the entity through the service that matches its concrete type. */
    @Override
    @SuppressWarnings("unchecked")
    public E update(E entity) {
        return (E) searchRepository(entity).update(entity);
    }

    /** Deletes the entity (by id) through the service that matches its concrete type. */
    @Override
    public void delete(E entity) {
        searchRepository(entity).deleteById(entity.getId());
    }

    /**
     * Locates the service bean whose name equals the entity's simple class name plus
     * {@code "ServiceImpl"}, compared case-insensitively.
     *
     * @return the matching service, or {@code null} when no bean name matches
     */
    private IService searchRepository(E entity) {
        // Build the expected bean name once instead of re-concatenating it on every map entry.
        final String expectedName = entity.getClass().getSimpleName().concat("ServiceImpl").toLowerCase();
        for (Entry<String, IService> service : services.entrySet()) {
            if (service.getKey().toLowerCase().equals(expectedName)) {
                return service.getValue();
            }
        }
        // NOTE(review): a null return makes callers fail with an NPE later; consider
        // throwing an IllegalStateException naming the missing bean instead.
        return null;
    }
}
| 25.087719
| 103
| 0.758042
|
fcf41b0ac2ce6c3827d0a708f0ef5d3d327958b5
| 101,055
|
/*
* ---------------------------------------------------------------------------------------------------------------------
* AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY - for any changes edit CharSegmentedSortedMultiset and regenerate
* ---------------------------------------------------------------------------------------------------------------------
*/
package io.deephaven.engine.table.impl.ssms;
import io.deephaven.base.verify.Assert;
import io.deephaven.chunk.attributes.Any;
import io.deephaven.vector.IntVector;
import io.deephaven.vector.IntVectorDirect;
import io.deephaven.vector.ObjectVector;
import io.deephaven.util.compare.IntComparisons;
import io.deephaven.util.type.ArrayTypeUtils;
import io.deephaven.engine.table.impl.by.SumIntChunk;
import io.deephaven.engine.table.impl.sort.timsort.TimsortUtils;
import io.deephaven.chunk.*;
import io.deephaven.chunk.attributes.ChunkLengths;
import io.deephaven.chunk.attributes.Values;
import io.deephaven.util.annotations.VisibleForTesting;
import gnu.trove.set.hash.TIntHashSet;
import org.apache.commons.lang3.mutable.MutableInt;
import java.util.Arrays;
import java.util.Objects;
import static io.deephaven.util.QueryConstants.NULL_INT;
public final class IntSegmentedSortedMultiset implements SegmentedSortedMultiSet<Integer>, IntVector {
// maximum number of values any single leaf may hold
private final int leafSize;
// number of leaves currently in use
private int leafCount;
// number of distinct values stored (insertions add insertCount to this)
private int size;
// total number of values including multiplicity (sum of all counts)
private long totalSize;
// adaptive galloping threshold shared by the merge loops, timsort-style
private int minGallop = TimsortUtils.INITIAL_GALLOP;
/**
 * If we have only a single leaf, then we use the directory arrays for the leaf values, otherwise we use it to
 * track the largest value in a given leaf. The values are valid for 0 ... leafCount - 2, because the last leaf
 * must accept any value that is greater than the second to last leave's maximum.
 */
private int [] directoryValues;
// counts parallel to directoryValues; only used while there is a single leaf (nulled otherwise)
private long [] directoryCount;
// per-leaf occupancy; leafValues/leafCounts are parallel [leaf][slot] arrays of length leafSize
private int [] leafSizes;
private int [][] leafValues;
private long [][] leafCounts;
// region Deltas
// NOTE(review): delta-tracking state for incremental updates; the code that toggles
// accumulateDeltas and consumes added/removed/prevValues is not visible in this chunk.
private transient boolean accumulateDeltas = false;
private transient TIntHashSet added;
private transient TIntHashSet removed;
private transient IntVector prevValues;
// endregion Deltas
//region Constructor
/**
 * Creates an empty multiset whose leaves each hold at most {@code leafSize} values.
 *
 * @param leafSize the maximum number of values stored in any single leaf
 */
public IntSegmentedSortedMultiset(int leafSize) {
    this.leafSize = leafSize;
    this.size = 0;
    this.leafCount = 0;
}
//endregion Constructor
//region Insertion
/**
 * Inserts the chunk of values (with a per-value count chunk) into this multiset.
 *
 * @return true when the number of distinct values changed
 */
@Override
public boolean insert(WritableChunk<? extends Values> valuesToInsert, WritableIntChunk<ChunkLengths> counts) {
    final long distinctBefore = size();
    insert(valuesToInsert.asWritableIntChunk(), counts);
    return distinctBefore != size();
}
/**
 * Merges the counts of insert values that already exist in one leaf, and compacts the
 * genuinely-new values (those not found in the leaf) toward the front of the chunk.
 *
 * @param ripos read position into valuesToInsert/counts
 * @param wipos write position for values kept as "new"; advanced for each value retained
 * @param maxInsert the largest value this leaf may own (ignored when lastLeaf is true)
 * @param lastLeaf true for the final leaf, which must accept any remaining value
 * @return the read position of the first value not examined for this leaf
 */
private int insertExistingIntoLeaf(WritableIntChunk<? extends Values> valuesToInsert, WritableIntChunk<ChunkLengths> counts, int ripos, MutableInt wipos, int leafSize, int [] leafValues, long [] leafCounts, int maxInsert, boolean lastLeaf) {
    int rlpos = 0;
    int nextValue;
    while (rlpos < leafSize && ripos < valuesToInsert.size() && (leq(nextValue = valuesToInsert.get(ripos), maxInsert) || lastLeaf)) {
        if (gt(leafValues[rlpos], nextValue)) {
            // we're not going to find nextValue in this leaf, so we skip over it
            valuesToInsert.set(wipos.intValue(), nextValue);
            counts.set(wipos.intValue(), counts.get(ripos));
            wipos.increment();
            ripos++;
        } else {
            // advance to the first leaf slot that could hold nextValue
            rlpos = upperBound(leafValues, rlpos, leafSize, nextValue);
            if (rlpos < leafSize) {
                if (eq(leafValues[rlpos], nextValue)) {
                    // value already present: just accumulate its count and consume it
                    leafCounts[rlpos] += counts.get(ripos);
                    ripos++;
                }
            } else if (rlpos == leafSize) {
                // we have hit the end of the leaf, we can not insert any value that is less than maxvalue
                final int lastInsert = lastLeaf ? valuesToInsert.size() : upperBound(valuesToInsert, ripos, valuesToInsert.size(), maxInsert);
                // noinspection unchecked
                valuesToInsert.copyFromTypedChunk((WritableIntChunk)valuesToInsert, ripos, wipos.intValue(), lastInsert - ripos);
                counts.copyFromTypedChunk(counts, ripos, wipos.intValue(), lastInsert - ripos);
                wipos.add(lastInsert - ripos);
                ripos = lastInsert;
            }
        }
    }
    return ripos;
}
/**
 * Merges insertCount new (known-absent) values from the chunk into the leaf at firstLeaf,
 * spreading the combined result across requiredLeaves leaves. Works back-to-front, pulling
 * the larger of (next insert value, next existing leaf value), with timsort-style galloping
 * (minGallop) to bulk-copy runs from whichever side is winning repeatedly.
 *
 * @param insertStart first chunk index to take, insertCount how many
 * @param firstLeaf the existing leaf being split, requiredLeaves how many leaves result
 * @param newLeafSize total values to distribute (existing + inserted)
 */
private void distributeNewIntoLeaves(IntChunk<? extends Values> valuesToInsert, IntChunk<ChunkLengths> counts, final int insertStart, final int insertCount, int firstLeaf, int requiredLeaves, int newLeafSize) {
    Assert.gtZero(insertCount, "insertCount");
    final int valuesPerLeaf = valuesPerLeaf(newLeafSize, requiredLeaves);
    final int lastLeafSize = newLeafSize - valuesPerLeaf * (requiredLeaves - 1);
    // we start at the back of the arrays, writing into each leaf as needed
    int wleaf = firstLeaf + requiredLeaves - 1;
    int wpos = lastLeafSize - 1;
    int ripos = insertStart + insertCount - 1;
    int rlpos = leafSizes[firstLeaf] - 1;
    Assert.geq(leafValues[firstLeaf].length, "leafValues[firstLeaf].length", rlpos);
    Assert.geq(leafCounts[firstLeaf].length, "leafCounts[firstLeaf].length", rlpos);
    leafSizes[firstLeaf] = valuesPerLeaf;
    // allocate backing arrays for the newly-created leaves in the hole
    for (int li = firstLeaf + 1; li < firstLeaf + requiredLeaves; ++li) {
        leafValues[li] = new int[leafSize];
        leafCounts[li] = new long[leafSize];
        leafSizes[li] = valuesPerLeaf;
    }
    leafSizes[wleaf] = lastLeafSize;
    int remaining = newLeafSize;
    int lwins = 0;
    int iwins = 0;
    // starting at the last leaf, pull from either the chunk or the first leaf in the range as appropriate
    while (remaining-- > 0) {
        final int insertValue = valuesToInsert.get(ripos);
        final int leafValue = leafValues[firstLeaf][rlpos];
        final boolean useInsertValue = gt(insertValue, leafValue);
        if (useInsertValue) {
            leafValues[wleaf][wpos] = insertValue;
            leafCounts[wleaf][wpos] = counts.get(ripos);
            ripos--;
            wpos--;
            iwins++;
            lwins = 0;
            // the insert side has won minGallop times in a row: gallop-copy its run
            if (wpos >= 0 && iwins > minGallop) {
                // find the smallest insert value greater than the leafValue, but do not go beyond the beginning of
                // the leaf we are writing to
                final int minInsert = gallopBound(valuesToInsert, Math.max(insertStart, ripos - wpos), ripos + 1, leafValue);
                final int gallopLength = ripos - minInsert + 1;
                if (gallopLength > 0) {
                    valuesToInsert.copyToTypedArray(minInsert, leafValues[wleaf], wpos - gallopLength + 1, gallopLength);
                    while (ripos >= minInsert) {
                        leafCounts[wleaf][wpos--] = counts.get(ripos--);
                    }
                    remaining -= gallopLength;
                }
                // adapt the gallop threshold: reward long runs, penalize short ones
                if (gallopLength < TimsortUtils.INITIAL_GALLOP) {
                    minGallop++;
                } else {
                    minGallop = Math.max(2, minGallop - 1);
                }
                iwins = 0;
            }
        } else {
            leafValues[wleaf][wpos] = leafValue;
            leafCounts[wleaf][wpos] = leafCounts[firstLeaf][rlpos];
            rlpos--;
            wpos--;
            lwins++;
            iwins = 0;
            // the leaf side has won minGallop times in a row: gallop-copy its run
            if (lwins > minGallop) {
                // find the smallest leaf value greater than the insertValue, but do not go beyond the beginning of
                // the leaf we are writing to
                final int minInsert = gallopBound(leafValues[firstLeaf], Math.max(0, rlpos - wpos), rlpos + 1, insertValue);
                final int gallopLength = rlpos - minInsert + 1;
                if (gallopLength > 0) {
                    System.arraycopy(leafValues[firstLeaf], minInsert, leafValues[wleaf], wpos - gallopLength + 1, gallopLength);
                    System.arraycopy(leafCounts[firstLeaf], minInsert, leafCounts[wleaf], wpos - gallopLength + 1, gallopLength);
                    rlpos -= gallopLength;
                    wpos -= gallopLength;
                    remaining -= gallopLength;
                }
                if (gallopLength < TimsortUtils.INITIAL_GALLOP) {
                    minGallop++;
                } else {
                    minGallop = Math.max(2, minGallop - 1);
                }
                lwins = 0;
            }
        }
        if (wpos < 0) {
            // allocate the next leaf
            if (wleaf < leafCount - 1) {
                updateDirectory(wleaf);
            }
            wpos = valuesPerLeaf - 1;
            wleaf--;
        }
        if (ripos < insertStart) {
            // we have nothing left to insert, but still may need to distribute values between the leaves
            break;
        }
        if (rlpos < 0) {
            // we have no leaf values left, but still need to copy the insert values into leaves as appropriate
            break;
        }
    }
    if (ripos >= insertStart) {
        // only insert values remain: bulk-copy them leaf-by-leaf
        assert rlpos < 0;
        while (remaining > 0) {
            // we want to copy wpos + 1 values at a time
            final int copySize = wpos + 1;
            valuesToInsert.copyToTypedArray(ripos - wpos, leafValues[wleaf], 0, copySize);
            for (int ii = 0; ii < copySize; ++ii) {
                leafCounts[wleaf][ii] = counts.get(ripos - (copySize - 1) + ii);
            }
            ripos -= copySize;
            remaining -= copySize;
            // allocate the next leaf
            if (wleaf < leafCount - 1) {
                updateDirectory(wleaf);
            }
            wpos = valuesPerLeaf - 1;
            wleaf--;
        }
    }
    else {
        // only original leaf values remain: bulk-copy them leaf-by-leaf
        assert rlpos >= 0;
        // we need to copy the rest of the leaf values
        while (remaining > 0) {
            final int copySize = wpos + 1;
            System.arraycopy(leafValues[firstLeaf], rlpos - wpos, leafValues[wleaf], 0, copySize);
            System.arraycopy(leafCounts[firstLeaf], rlpos - wpos, leafCounts[wleaf], 0, copySize);
            rlpos -= copySize;
            remaining -= copySize;
            // allocate the next leaf
            if (wleaf < leafCount - 1) {
                updateDirectory(wleaf);
            }
            wpos = valuesPerLeaf - 1;
            wleaf--;
        }
    }
    size += insertCount;
}
/**
 * Merges insertCount new (known-absent) values into a single leaf that has room for them,
 * in place, working back-to-front with timsort-style galloping. The leaf arrays must be
 * large enough to hold leafSize + insertCount entries.
 *
 * @param insertStart first chunk index to take, insertCount how many
 * @param leafSize the current number of occupied slots in leafValues/leafCounts
 */
private void insertNewIntoLeaf(WritableIntChunk<? extends Values> valuesToInsert, WritableIntChunk<ChunkLengths> counts, int insertStart, int insertCount, int leafSize, int [] leafValues, long [] leafCounts) {
    assert insertCount > 0;
    // we start at the end of the leaf and insert values, picking off the correct value as we go
    int wpos = leafSize + insertCount - 1;
    int ripos = insertStart + insertCount - 1;
    int rlpos = leafSize - 1;
    int lwins = 0; // leaf wins
    int iwins = 0; // insert wins
    while (wpos >= 0) {
        final int insertValue = valuesToInsert.get(ripos);
        final int leafValue = leafValues[rlpos];
        if (gt(insertValue, leafValue)) {
            leafValues[wpos] = insertValue;
            leafCounts[wpos] = counts.get(ripos);
            if (ripos == 0) {
                // all that is left is the leaf so we are completed
                return;
            }
            ripos--;
            wpos--;
            iwins++;
            lwins = 0;
            // insert side is streaking: gallop-copy its run in bulk
            if (iwins > minGallop) {
                final int minInsert = gallopBound(valuesToInsert, 0, ripos + 1, leafValue);
                final int gallopLength = ripos - minInsert + 1;
                if (gallopLength > 0) {
                    valuesToInsert.copyToTypedArray(minInsert, leafValues, wpos - gallopLength + 1, gallopLength);
                    while (ripos >= minInsert) {
                        leafCounts[wpos--] = counts.get(ripos--);
                    }
                    if (ripos == -1) {
                        return;
                    }
                }
                // adapt the gallop threshold: reward long runs, penalize short ones
                if (gallopLength < TimsortUtils.INITIAL_GALLOP) {
                    minGallop++;
                } else {
                    minGallop = Math.max(2, minGallop - 1);
                }
                iwins = 0;
            }
        } else {
            leafValues[wpos] = leafValue;
            leafCounts[wpos] = leafCounts[rlpos];
            if (rlpos == 0) {
                // we just need to copy the remaining insert values to the leaf
                copyRemainingValuesToLeaf(valuesToInsert, counts, insertStart, leafValues, leafCounts, ripos);
                return;
            }
            rlpos--;
            wpos--;
            lwins++;
            iwins = 0;
            // leaf side is streaking: gallop-shift its run in bulk (overlapping arraycopy is safe)
            if (lwins > minGallop) {
                final int minInsert = gallopBound(leafValues, 0, rlpos + 1, insertValue);
                final int gallopLength = rlpos - minInsert + 1;
                if (gallopLength > 0) {
                    System.arraycopy(leafValues, minInsert, leafValues, wpos - gallopLength + 1, gallopLength);
                    System.arraycopy(leafCounts, minInsert, leafCounts, wpos - gallopLength + 1, gallopLength);
                    rlpos -= gallopLength;
                    wpos -= gallopLength;
                    if (rlpos == -1) {
                        copyRemainingValuesToLeaf(valuesToInsert, counts, insertStart, leafValues, leafCounts, ripos);
                        return;
                    }
                }
                if (gallopLength < TimsortUtils.INITIAL_GALLOP) {
                    minGallop++;
                } else {
                    minGallop = Math.max(2, minGallop - 1);
                }
            }
        }
    }
}
/**
 * Copies every still-pending insert value in [insertStart, ripos] — and its count — into
 * the front of the leaf arrays.
 */
private void copyRemainingValuesToLeaf(WritableIntChunk<? extends Values> valuesToInsert, WritableIntChunk<ChunkLengths> counts, int insertStart, int[] leafValues, long[] leafCounts, int ripos) {
    final int length = ripos - insertStart + 1;
    valuesToInsert.copyToTypedArray(insertStart, leafValues, 0, length);
    for (int offset = 0; offset < length; ++offset) {
        leafCounts[offset] = counts.get(insertStart + offset);
    }
}
/**
 * After a merge pass, shifts the unconsumed tail of the insert chunk (and its counts) down
 * to the write position and shrinks both chunks, so only the still-new values remain.
 */
private void maybeCompact(WritableIntChunk<? extends Values> valuesToInsert, WritableIntChunk<ChunkLengths> counts, int ripos, int wipos) {
    if (ripos == wipos) {
        // nothing was consumed in place; the chunks are already compact
        return;
    }
    final int tailLength = valuesToInsert.size() - ripos;
    //noinspection unchecked
    valuesToInsert.copyFromTypedChunk((IntChunk) valuesToInsert, ripos, wipos, tailLength);
    counts.copyFromChunk(counts, ripos, wipos, tailLength);
    final int compactedSize = wipos + tailLength;
    valuesToInsert.setSize(compactedSize);
    counts.setSize(compactedSize);
}
/**
 * First pass of insertion: for values already present somewhere in the SSM, folds their
 * counts into the owning leaf; afterwards the chunk is compacted to contain only the
 * genuinely-new values (possibly none).
 */
private void insertExisting(WritableIntChunk<? extends Values> valuesToInsert, WritableIntChunk<ChunkLengths> counts) {
    if (leafCount == 0) {
        return;
    }
    if (leafCount == 1) {
        // single-leaf case: the directory arrays are the leaf
        final MutableInt wipos = new MutableInt(0);
        final int ripos = insertExistingIntoLeaf(valuesToInsert, counts, 0, wipos, size, directoryValues, directoryCount, NULL_INT, true);
        maybeCompact(valuesToInsert, counts, ripos, wipos.intValue());
        return;
    }
    // we have multiple leaves that we should insert into
    final MutableInt wipos = new MutableInt(0);
    int ripos = 0;
    int nextLeaf = 0;
    while (ripos < valuesToInsert.size()) {
        final int startValue = valuesToInsert.get(ripos);
        nextLeaf = lowerBoundExclusive(directoryValues, nextLeaf, leafCount - 1, startValue);
        // find the thing in directoryValues
        final boolean lastLeaf = nextLeaf == leafCount - 1;
        final int maxValue = lastLeaf ? NULL_INT : directoryValues[nextLeaf];
        ripos = insertExistingIntoLeaf(valuesToInsert, counts, ripos, wipos, leafSizes[nextLeaf], leafValues[nextLeaf], leafCounts[nextLeaf], maxValue, lastLeaf);
        if (lastLeaf) {
            break;
        }
    }
    maybeCompact(valuesToInsert, counts, ripos, wipos.intValue());
}
/**
 * Core insertion routine. Handles, in order: the empty chunk, building a brand-new SSM,
 * merging counts of already-present values, the pure-append fast path, growing a single
 * directory "leaf", and finally distributing genuinely-new values across leaves.
 * The input chunk may be compacted/consumed as a side effect.
 */
private void insert(WritableIntChunk<? extends Values> valuesToInsert, WritableIntChunk<ChunkLengths> counts) {
    validate();
    validateInputs(valuesToInsert, counts);
    if (valuesToInsert.size() == 0) {
        return;
    }
    totalSize += SumIntChunk.sumIntChunk(counts, 0, counts.size());
    if (leafCount == 0) {
        // we are creating something brand new
        makeLeavesInitial(valuesToInsert, counts);
        maybeAccumulateAdditions(valuesToInsert);
        validate();
        return;
    }
    insertExisting(valuesToInsert, counts);
    if (valuesToInsert.size() == 0) {
        // every value was already present; only counts changed
        validate();
        return;
    }
    maybeAccumulateAdditions(valuesToInsert);
    // fast path: everything left is greater than our current maximum, so just append
    if (leafCount > 1 && gt(valuesToInsert.get(0), getMaxInt())) {
        doAppend(valuesToInsert, counts);
        return;
    }
    final int newSize = valuesToInsert.size() + size;
    final int desiredLeafCount = getDesiredLeafCount(newSize);
    // now we are inserting things, which we know to be new
    if (leafCount == 1) {
        // if we are too small to fit the excess, increase our size
        final int freeLocations = directoryValues.length - size;
        if (freeLocations < valuesToInsert.size()) {
            if (size + valuesToInsert.size() > leafSize) {
                // we must move the directory into the first leaf
                moveDirectoryToLeaf(desiredLeafCount);
            } else {
                directoryValues = Arrays.copyOf(directoryValues, newSize);
                directoryCount = Arrays.copyOf(directoryCount, newSize);
            }
        }
        if (desiredLeafCount == 1) {
            // we should fit into the existing leaf
            insertNewIntoLeaf(valuesToInsert, counts, 0, valuesToInsert.size(), size, directoryValues, directoryCount);
            size = newSize;
            validate();
            return;
        }
    }
    // this might not be enough, but we should at least start out with enough room for what we will insert
    reallocateLeafArrays(desiredLeafCount);
    int rpos = 0;
    int nextLeaf = 0;
    do {
        final int insertValue = valuesToInsert.get(rpos);
        // find out what leaf this belongs in
        nextLeaf = leafCount > 1 ? upperBound(directoryValues, nextLeaf, leafCount - 1, insertValue) : 0;
        // now figure out the last insert value that is suitable for this leaf
        final int lastInsertValue;
        if (nextLeaf == leafCount - 1) {
            // we should insert all of the remaining values in this leaf
            lastInsertValue = valuesToInsert.size();
        } else {
            final int lastLeafValue = directoryValues[nextLeaf];
            lastInsertValue = upperBound(valuesToInsert, rpos, valuesToInsert.size(), lastLeafValue);
        }
        final int originalLeafSize = leafSizes[nextLeaf];
        final int insertIntoLeaf = lastInsertValue - rpos;
        final int newLeafSize = originalLeafSize + insertIntoLeaf;
        final int requiredLeaves = getDesiredLeafCount(newLeafSize);
        if (requiredLeaves > 1) {
            // we need to make a hole for the new things
            makeLeafHole(nextLeaf + 1, requiredLeaves - 1);
            leafCount += (requiredLeaves - 1);
        }
        distributeNewIntoLeaves(valuesToInsert, counts, rpos, insertIntoLeaf, nextLeaf, requiredLeaves, newLeafSize);
        rpos += insertIntoLeaf;
    }
    while (rpos < valuesToInsert.size());
    validate();
}
/** Converts the single directory "leaf" into leaf 0 of a multi-leaf structure. */
private void moveDirectoryToLeaf(int desiredLeafCount) {
    this.moveDirectoryToLeaf(desiredLeafCount, 0);
}
/**
 * Converts the single directory "leaf" into the leaf at directoryLocation of a newly
 * allocated multi-leaf structure of capacity desiredLeafCount, reusing the directory
 * arrays as the leaf storage when they are already full-sized.
 */
private void moveDirectoryToLeaf(int desiredLeafCount, int directoryLocation) {
    leafValues = new int[desiredLeafCount][];
    leafCounts = new long[desiredLeafCount][];
    leafSizes = new int[desiredLeafCount];
    leafSizes[directoryLocation] = size;
    if (directoryValues.length < leafSize) {
        // undersized directory arrays: copy them up to a full leaf
        leafValues[directoryLocation] = Arrays.copyOf(directoryValues, leafSize);
        leafCounts[directoryLocation] = Arrays.copyOf(directoryCount, leafSize);
    } else {
        // exactly leaf-sized: adopt the arrays directly without copying
        Assert.eq(directoryValues.length, "directoryValues.length", leafSize, "leafSize");
        leafValues[directoryLocation] = directoryValues;
        leafCounts[directoryLocation] = directoryCount;
    }
    leafCount = 1;
    // from here on the directory arrays hold per-leaf maxima, not values/counts
    directoryCount = null;
    directoryValues = new int[desiredLeafCount - 1];
}
/**
 * Fast path for inserting values that are all greater than the current maximum: top off the
 * last leaf, then pack any remainder into freshly appended leaves, fixing up the directory
 * entry for the formerly-last leaf.
 */
private void doAppend(WritableIntChunk<? extends Values> valuesToInsert, WritableIntChunk<ChunkLengths> counts) {
    // We are doing a special case of appending to the SSM
    final int lastLeafIndex = leafCount - 1;
    final int lastLeafSize = leafSizes[lastLeafIndex];
    final int lastLeafFree = this.leafSize - lastLeafSize;
    int rpos = 0;
    if (lastLeafFree > 0) {
        // fill the remaining space in the current last leaf first
        final int insertCount = Math.min(lastLeafFree, valuesToInsert.size());
        insertNewIntoLeaf(valuesToInsert, counts, rpos, insertCount, lastLeafSize, leafValues[lastLeafIndex], leafCounts[lastLeafIndex]);
        leafSizes[lastLeafIndex] += insertCount;
        rpos += insertCount;
        if (insertCount == valuesToInsert.size()) {
            size += insertCount;
            validate();
            return;
        }
    }
    final int newLeavesRequired = getDesiredLeafCount(valuesToInsert.size() - rpos);
    reallocateLeafArrays(leafCount + newLeavesRequired);
    // we need to fixup the directory from the last leaf
    if (rpos > 0) {
        directoryValues[lastLeafIndex] = valuesToInsert.get(rpos - 1);
    } else {
        assert leafSizes[lastLeafIndex] == leafSize;
        directoryValues[lastLeafIndex] = leafValues[lastLeafIndex][leafSize - 1];
    }
    final int oldLeafCount = leafCount;
    leafCount += newLeavesRequired;
    packValuesIntoLeaves(valuesToInsert, counts, rpos, oldLeafCount, leafSize);
    size += valuesToInsert.size();
    validate();
}
/**
 * Shifts {@code length} leaves (sizes, values, counts) from srcPos to destPos along with
 * their directory entries. A right shift moves one fewer directory slot, since the last
 * leaf of the moved range has no directory entry of its own.
 */
private void copyLeavesAndDirectory(int srcPos, int destPos, int length) {
    System.arraycopy(leafSizes, srcPos, leafSizes, destPos, length);
    System.arraycopy(leafValues, srcPos, leafValues, destPos, length);
    System.arraycopy(leafCounts, srcPos, leafCounts, destPos, length);
    final int directoryLength = destPos > srcPos ? length - 1 : length;
    System.arraycopy(directoryValues, srcPos, directoryValues, destPos, directoryLength);
}
/**
 * Opens a hole of holeSize empty leaf slots at holePosition, shifting later leaves right.
 * The caller is responsible for bumping leafCount afterwards.
 */
private void makeLeafHole(int holePosition, int holeSize) {
    reallocateLeafArrays(holeSize + leafCount);
    if (holePosition != leafCount) {
        copyLeavesAndDirectory(holePosition, holePosition + holeSize, leafCount - holePosition);
    }
    // this is not strictly necessary; but will make debugging simpler
    Arrays.fill(leafSizes, holePosition, holePosition + holeSize, 0);
    Arrays.fill(leafValues, holePosition, holePosition + holeSize, null);
    Arrays.fill(leafCounts, holePosition, holePosition + holeSize, null);
    // region fillValue
    // the directory has one fewer slot than the leaves, so clamp the fill at the array end
    if (holePosition + holeSize < leafValues.length) {
        Arrays.fill(directoryValues, holePosition, holePosition + holeSize, NULL_INT);
    } else {
        Arrays.fill(directoryValues, holePosition, holePosition + holeSize - 1, NULL_INT);
    }
    // endregion fillValue
}
/**
 * Grows the leaf bookkeeping arrays so at least {@code newSize} leaves fit; a no-op when
 * the current capacity already suffices.
 */
private void reallocateLeafArrays(int newSize) {
    if (leafSizes.length >= newSize) {
        return;
    }
    final int allocated = leafArraySize(newSize);
    leafSizes = Arrays.copyOf(leafSizes, allocated);
    leafValues = Arrays.copyOf(leafValues, allocated);
    leafCounts = Arrays.copyOf(leafCounts, allocated);
    // the directory always has one fewer entry than the leaf capacity
    directoryValues = Arrays.copyOf(directoryValues, allocated - 1);
}
/** Allocates fresh, empty bookkeeping arrays for exactly {@code newSize} leaves. */
private void allocateLeafArrays(int newSize) {
    leafSizes = new int[newSize];
    leafValues = new int[newSize][];
    leafCounts = new long[newSize][];
    // one directory entry per leaf except the last
    directoryValues = new int[newSize - 1];
}
/** Doubling growth policy: at least {@code minimumSize}, and at least twice the current capacity. */
private int leafArraySize(int minimumSize) {
    final int doubled = leafSizes.length * 2;
    return minimumSize > doubled ? minimumSize : doubled;
}
/**
 * Builds the initial storage from a sorted chunk of values and counts: a single directory
 * "leaf" sized exactly to the input when everything fits in one leaf, otherwise a set of
 * evenly-packed leaves.
 */
private void makeLeavesInitial(IntChunk<? extends Values> values, IntChunk<ChunkLengths> counts) {
    leafCount = getDesiredLeafCount(values.size());
    size = values.size();
    if (leafCount == 1) {
        // single-leaf mode: the directory arrays hold the values/counts directly
        directoryValues = new int[values.size()];
        directoryCount = new long[values.size()];
        values.copyToTypedArray(0, directoryValues, 0, values.size());
        for (int ii = 0; ii < counts.size(); ++ii) {
            directoryCount[ii] = counts.get(ii);
        }
        return;
    }
    allocateLeafArrays(leafCount);
    final int valuesPerLeaf = valuesPerLeaf(values.size(), leafCount);
    packValuesIntoLeaves(values, counts, 0, 0, valuesPerLeaf);
}
/**
 * Distributes values[rpos..] (and their counts) into consecutive freshly-allocated leaves
 * starting at startLeaf, at most valuesPerLeaf per leaf, updating leaf sizes and the
 * directory entry for each non-final leaf.
 */
private void packValuesIntoLeaves(IntChunk<? extends Values> values, IntChunk<ChunkLengths> counts, int rpos, int startLeaf, int valuesPerLeaf) {
    while (rpos < values.size()) {
        final int thisLeafSize = Math.min(valuesPerLeaf, values.size() - rpos);
        leafSizes[startLeaf] = thisLeafSize;
        leafValues[startLeaf] = new int[leafSize];
        // bulk-copy the values; only the counts need an element-wise loop (the original
        // code re-copied the values element-by-element here, duplicating this arraycopy)
        values.copyToTypedArray(rpos, leafValues[startLeaf], 0, thisLeafSize);
        leafCounts[startLeaf] = new long[leafSize];
        for (int ii = 0; ii < thisLeafSize; ++ii) {
            leafCounts[startLeaf][ii] = counts.get(rpos + ii);
        }
        if (startLeaf < leafCount - 1) {
            directoryValues[startLeaf] = leafValues[startLeaf][thisLeafSize - 1];
        }
        rpos += thisLeafSize;
        startLeaf++;
    }
}
//endregion
/** Resets this multiset to the freshly-constructed empty state, releasing all storage. */
private void clear() {
    size = 0;
    totalSize = 0;
    leafCount = 0;
    directoryValues = null;
    directoryCount = null;
    leafSizes = null;
    leafValues = null;
    leafCounts = null;
}
//region Bounds search
/**
 * Binary search: return the highest index in [lo, hi) whose value is less than or equal to
 * searchValue, or lo when no such index exists. (The original javadoc described the wrong
 * contract; the code moves lo on leq and returns lo.)
 *
 * @param valuesToSearch the values to search for searchValue in
 * @param lo the first index to search for
 * @param hi one past the last index to search in
 * @param searchValue the value to find
 * @return the highest index whose value is leq searchValue, clamped to lo
 */
private static int lowerBound(int [] valuesToSearch, int lo, int hi, int searchValue) {
    while (lo < hi) {
        final int mid = (lo + hi) >>> 1;
        final int testValue = valuesToSearch[mid];
        final boolean moveLo = leq(testValue, searchValue);
        if (moveLo) {
            lo = mid;
            if (lo == hi - 1) {
                break;
            }
        } else {
            hi = mid;
        }
    }
    return lo;
}
/**
 * Binary search used by the gallop loops: return the lowest index in [lo, hi) whose value
 * is strictly greater than searchValue (hi when every value is leq searchValue). (The
 * original javadoc described the wrong contract.)
 *
 * @param valuesToSearch the values to search for searchValue in
 * @param lo the first index to search for
 * @param hi one past the last index to search in
 * @param searchValue the value to find
 * @return the lowest index whose value is gt searchValue
 */
private static int gallopBound(IntChunk<? extends Any> valuesToSearch, int lo, int hi, int searchValue) {
    while (lo < hi) {
        final int mid = (lo + hi) >>> 1;
        final int testValue = valuesToSearch.get(mid);
        final boolean moveLo = leq(testValue, searchValue);
        if (moveLo) {
            if (mid == lo) {
                return mid + 1;
            }
            lo = mid;
        } else {
            hi = mid;
        }
    }
    return lo;
}
/**
 * Binary search used by the gallop loops: return the lowest index in [lo, hi) whose value
 * is strictly greater than searchValue (hi when every value is leq searchValue). (The
 * original javadoc described the wrong contract.)
 *
 * @param valuesToSearch the values to search for searchValue in
 * @param lo the first index to search for
 * @param hi one past the last index to search in
 * @param searchValue the value to find
 * @return the lowest index whose value is gt searchValue
 */
private static int gallopBound(int [] valuesToSearch, int lo, int hi, int searchValue) {
    while (lo < hi) {
        final int mid = (lo + hi) >>> 1;
        final int testValue = valuesToSearch[mid];
        final boolean moveLo = leq(testValue, searchValue);
        if (moveLo) {
            if (mid == lo) {
                return mid + 1;
            }
            lo = mid;
        } else {
            hi = mid;
        }
    }
    return lo;
}
/**
 * Binary search: return the lowest index in [lo, hi) whose value is greater than or equal
 * to searchValue, or hi when every value is less than searchValue. (The original javadoc
 * described the wrong contract; the code moves hi on geq and returns hi. Note this differs
 * from the IntChunk overload, which uses a strict gt test.)
 *
 * @param valuesToSearch the values to search for searchValue in
 * @param lo the first index to search for
 * @param hi one past the last index to search in
 * @param searchValue the value to find
 * @return the lowest index whose value is geq searchValue
 */
private static int upperBound(int [] valuesToSearch, int lo, int hi, int searchValue) {
    while (lo < hi) {
        final int mid = (lo + hi) >>> 1;
        final int testValue = valuesToSearch[mid];
        final boolean moveHi = geq(testValue, searchValue);
        if (moveHi) {
            hi = mid;
        } else {
            lo = mid + 1;
        }
    }
    return hi;
}
/**
 * Binary search: return the lowest index in [lo, hi) whose value is strictly greater than
 * searchValue, or hi when every value is leq searchValue. (The original javadoc described
 * the wrong contract; note this differs from the int[] overload, which uses geq.)
 *
 * @param valuesToSearch the values to search for searchValue in
 * @param lo the first index to search for
 * @param hi one past the last index to search in
 * @param searchValue the value to find
 * @return the lowest index whose value is gt searchValue
 */
private static int upperBound(IntChunk<? extends Values> valuesToSearch, int lo, int hi, int searchValue) {
    while (lo < hi) {
        final int mid = (lo + hi) >>> 1;
        final int testValue = valuesToSearch.get(mid);
        final boolean moveHi = gt(testValue, searchValue);
        if (moveHi) {
            hi = mid;
        } else {
            lo = mid + 1;
        }
    }
    return hi;
}
/**
 * Binary search: return the lowest index in [lo, hi) whose value is greater than OR EQUAL
 * to searchValue, or hi when every value is less than searchValue. (Despite the name and
 * the original javadoc's "greater than", equal values are included: the code only moves lo
 * past strictly-smaller values.)
 *
 * @param valuesToSearch the values to search for searchValue in
 * @param lo the first index to search for
 * @param hi one past the last index to search in
 * @param searchValue the value to find
 * @return the lowest index whose value is geq searchValue
 */
private static int lowerBoundExclusive(int [] valuesToSearch, int lo, int hi, int searchValue) {
    while (lo < hi) {
        final int mid = (lo + hi) >>> 1;
        final int testValue = valuesToSearch[mid];
        final boolean moveLo = lt(testValue, searchValue);
        if (moveLo) {
            lo = mid + 1;
            if (lo == hi) {
                break;
            }
        } else {
            hi = mid;
        }
    }
    return lo;
}
//endregion
//region Removal
/**
 * Removes the given sorted values (with per-value counts) from this multiset.
 *
 * @param valuesToRemove the sorted values to remove
 * @return true when the number of distinct values changed
 */
@Override
public boolean remove(RemoveContext removeContext, WritableChunk<? extends Values> valuesToRemove, WritableIntChunk<ChunkLengths> counts) {
    final long distinctBefore = size();
    remove(removeContext, valuesToRemove.asIntChunk(), counts);
    return distinctBefore != size();
}
/**
 * Core removal routine: decrements/eliminates the given sorted values leaf by leaf, then
 * merges under-full adjacent leaves and records fully-emptied leaves as compaction ranges
 * in the RemoveContext (tracked by index {@code cl}), finally compacting the leaf arrays.
 */
private void remove(RemoveContext removeContext, IntChunk<? extends Values> valuesToRemove, IntChunk<ChunkLengths> counts) {
    validate();
    validateInputs(valuesToRemove, counts);
    final int removeSize = valuesToRemove.size();
    if (removeSize == 0) {
        return;
    }
    totalSize -= SumIntChunk.sumIntChunk(counts, 0, counts.size());
    if (leafCount == 1) {
        // single-leaf (directory) case: one pass over the directory arrays
        final MutableInt sz = new MutableInt(size);
        final int consumed = removeFromLeaf(removeContext, valuesToRemove, counts, 0, valuesToRemove.size(), directoryValues, directoryCount, sz);
        assert consumed == valuesToRemove.size();
        if (sz.intValue() == 0) {
            clear();
        } else {
            size = sz.intValue();
        }
    } else {
        removeContext.ensureLeafCount((leafCount + 1)/ 2);
        int rpos = 0;
        int nextLeaf = 0;
        // cl indexes the last compaction range recorded in removeContext (-1 = none yet)
        int cl = -1;
        do {
            // figure out what the first leaf we can remove something from is
            final int firstValueToRemove = valuesToRemove.get(rpos);
            nextLeaf = lowerBound(directoryValues, nextLeaf, leafCount - 1, firstValueToRemove);
            final MutableInt sz = new MutableInt(leafSizes[nextLeaf]);
            rpos = removeFromLeaf(removeContext, valuesToRemove, counts, rpos, valuesToRemove.size(), leafValues[nextLeaf], leafCounts[nextLeaf], sz);
            size -= leafSizes[nextLeaf] - sz.intValue();
            leafSizes[nextLeaf] = sz.intValue();
            if (sz.intValue() == 0) {
                // the leaf is now empty; schedule it for compaction
                cl = markLeafForRemoval(removeContext, nextLeaf, cl);
            } else {
                // we figure out if we can be pulled back into the prior leaf
                final int priorLeaf;
                if (cl >= 0 && removeContext.compactionLeafs[cl] + removeContext.compactionLeafLengths[cl] == nextLeaf) {
                    // we need to go to one leaf before our compaction length, if we happen to be removing all
                    // the prior leaves we end up with a negative number here.
                    priorLeaf = removeContext.compactionLeafs[cl] - 1;
                } else {
                    priorLeaf = nextLeaf - 1;
                }
                if (priorLeaf >= 0 && leafSizes[priorLeaf] + leafSizes[nextLeaf] <= leafSize) {
                    final int priorAndCurrentSize = leafSizes[priorLeaf] + leafSizes[nextLeaf];
                    if (nextLeaf < leafCount - 1 && priorAndCurrentSize + leafSizes[nextLeaf + 1] <= leafSize) {
                        // we need to merge all three of these leaves
                        mergeThreeLeavesForward(priorLeaf, nextLeaf, nextLeaf + 1);
                        if (priorLeaf < nextLeaf - 1) {
                            // this means we should be adding a leaf to remove that is before a range of removals
                            cl = addLeafToLastRemovalRange(removeContext, priorLeaf, cl);
                        } else {
                            cl = markLeafForRemoval(removeContext, priorLeaf, cl);
                        }
                        cl = markLeafForRemoval(removeContext, nextLeaf, cl);
                    } else {
                        mergeTwoLeavesBack(priorLeaf, nextLeaf);
                        cl = markLeafForRemoval(removeContext, nextLeaf, cl);
                    }
                }
                else if (nextLeaf < leafCount - 1 && leafSizes[nextLeaf] + leafSizes[nextLeaf + 1] <= leafSize) {
                    // we shove ourselves forward into the next leaf
                    mergeTwoLeavesForward(nextLeaf, nextLeaf + 1);
                    cl = markLeafForRemoval(removeContext, nextLeaf, cl);
                }
            }
            nextLeaf++;
            validateCompaction(removeContext, cl);
        }
        while (rpos < valuesToRemove.size());
        if (size == 0) {
            clear();
        } else {
            compactLeafs(removeContext, cl);
        }
    }
    validate();
}
/** Asserts that the recorded compaction ranges are disjoint and strictly ordered. */
private void validateCompaction(RemoveContext removeContext, int cl) {
    for (int idx = 0; idx < cl; ++idx) {
        final int rangeStart = removeContext.compactionLeafs[idx];
        final int rangeEnd = rangeStart + removeContext.compactionLeafLengths[idx] - 1;
        final int nextRangeStart = removeContext.compactionLeafs[idx + 1];
        Assert.gt(nextRangeStart, "nextCompactLeaf", rangeEnd, "lastCompactLeaf");
    }
}
/**
 * Appends the contents of secondLeafSource onto the end of firstLeafDestination (which must
 * have room), zeroing the source's size and carrying the source's directory entry back to
 * the destination. The emptied source is later reclaimed via compaction.
 */
private void mergeTwoLeavesBack(int firstLeafDestination, int secondLeafSource) {
    final int wpos = leafSizes[firstLeafDestination];
    final int secondSourceSize = leafSizes[secondLeafSource];
    System.arraycopy(leafValues[secondLeafSource], 0, leafValues[firstLeafDestination], wpos, secondSourceSize);
    System.arraycopy(leafCounts[secondLeafSource], 0, leafCounts[firstLeafDestination], wpos, secondSourceSize);
    leafSizes[firstLeafDestination] += secondSourceSize;
    leafSizes[secondLeafSource] = 0;
    // the last leaf has no directory entry to propagate
    if (secondLeafSource < leafCount - 1) {
        directoryValues[firstLeafDestination] = directoryValues[secondLeafSource];
    }
}
/**
 * Prepends the contents of firstLeafSource onto the front of secondLeafDestination (which
 * must have room), zeroing the source's size. The emptied source is later reclaimed via
 * compaction.
 */
private void mergeTwoLeavesForward(int firstLeafSource, int secondLeafDestination) {
    final int firstSourceSize = leafSizes[firstLeafSource];
    final int secondDestinationSize = leafSizes[secondLeafDestination];
    // first make a hole
    System.arraycopy(leafValues[secondLeafDestination], 0, leafValues[secondLeafDestination], firstSourceSize, secondDestinationSize);
    System.arraycopy(leafCounts[secondLeafDestination], 0, leafCounts[secondLeafDestination], firstSourceSize, secondDestinationSize);
    // now copy the first leaf into that hole
    System.arraycopy(leafValues[firstLeafSource], 0, leafValues[secondLeafDestination], 0, firstSourceSize);
    System.arraycopy(leafCounts[firstLeafSource], 0, leafCounts[secondLeafDestination], 0, firstSourceSize);
    leafSizes[secondLeafDestination] += firstSourceSize;
    leafSizes[firstLeafSource] = 0;
    // the directory values should be ignored at this point, and is marked for removal
}
private void mergeThreeLeavesForward(int firstLeafSource, int secondLeafSource, int thirdLeafDestination) {
final int firstSourceSize = leafSizes[firstLeafSource];
final int secondSourceSize = leafSizes[secondLeafSource];
final int totalSourceSize = firstSourceSize + secondSourceSize;
final int thirdDestinationSize = leafSizes[thirdLeafDestination];
// first make a hole
System.arraycopy(leafValues[thirdLeafDestination], 0, leafValues[thirdLeafDestination], totalSourceSize, thirdDestinationSize);
System.arraycopy(leafCounts[thirdLeafDestination], 0, leafCounts[thirdLeafDestination], totalSourceSize, thirdDestinationSize);
// now copy the first leaf into that hole
System.arraycopy(leafValues[firstLeafSource], 0, leafValues[thirdLeafDestination], 0, firstSourceSize);
System.arraycopy(leafCounts[firstLeafSource], 0, leafCounts[thirdLeafDestination], 0, firstSourceSize);
System.arraycopy(leafValues[secondLeafSource], 0, leafValues[thirdLeafDestination], firstSourceSize, secondSourceSize);
System.arraycopy(leafCounts[secondLeafSource], 0, leafCounts[thirdLeafDestination], firstSourceSize, secondSourceSize);
leafSizes[thirdLeafDestination] += totalSourceSize;
leafSizes[firstLeafSource] = 0;
leafSizes[secondLeafSource] = 0;
// the directory values should be ignored at this point, and is marked for removal
}
private int markLeafForRemoval(RemoveContext removeContext, int leafToRemove, int cl) {
validateCompaction(removeContext, cl);
// we've removed all values in this leaf, so we need to mark it for deletion from our list
if (cl == -1) {
removeContext.compactionLeafs[cl = 0] = leafToRemove;
removeContext.compactionLeafLengths[cl] = 1;
} else if (removeContext.compactionLeafs[cl] + removeContext.compactionLeafLengths[cl] == leafToRemove) {
removeContext.compactionLeafLengths[cl]++;
} else {
removeContext.compactionLeafs[++cl] = leafToRemove;
removeContext.compactionLeafLengths[cl] = 1;
}
validateCompaction(removeContext, cl);
return cl;
}
private int addLeafToLastRemovalRange(RemoveContext removeContext, int leafToRemove, int cl) {
validateCompaction(removeContext, cl);
assert cl >= 0;
// we've removed all values in this leaf, so we need to mark it for deletion from our list
assert removeContext.compactionLeafs[cl] == leafToRemove + 1;
removeContext.compactionLeafs[cl]--;
removeContext.compactionLeafLengths[cl]++;
// we might need to collapse two adjacent ranges in the compaction
if (cl > 0 && removeContext.compactionLeafs[cl - 1] + removeContext.compactionLeafLengths[cl - 1] == removeContext.compactionLeafs[cl]) {
removeContext.compactionLeafLengths[cl - 1] += removeContext.compactionLeafLengths[cl];
cl--;
}
validateCompaction(removeContext, cl);
return cl;
}
    /**
     * Physically delete the leaves recorded in {@code removeContext.compactionLeafs} /
     * {@code compactionLeafLengths} by sliding surviving leaves (and their directory
     * entries) left over the removed ranges, then promote back to directory form if only
     * one leaf remains.
     *
     * @param removeContext holds the sorted, disjoint ranges of leaf indices to delete
     * @param cl index of the last valid range in the context (inclusive)
     */
    private void compactLeafs(RemoveContext removeContext, int cl) {
        assert removeContext != null;
        int removed = 0;
        for (int cli = 0; cli <= cl; cli++) {
            final int removeSize = removeContext.compactionLeafLengths[cli];
            // rposc: first surviving leaf after this removed range; wpos: where it lands
            final int rposc = removeContext.compactionLeafs[cli] + removeSize;
            final int wpos = removeContext.compactionLeafs[cli] - removed;
            removed += removeSize;
            if (rposc <= leafCount) {
                // we are not removing everything, so have to copy
                final int lastrposc;
                if (cli < cl) {
                    lastrposc = removeContext.compactionLeafs[cli + 1];
                } else {
                    lastrposc = leafCount;
                }
                System.arraycopy(leafValues, rposc, leafValues, wpos, lastrposc - rposc);
                System.arraycopy(leafCounts, rposc, leafCounts, wpos, lastrposc - rposc);
                System.arraycopy(leafSizes, rposc, leafSizes, wpos, lastrposc - rposc);
                if (rposc < leafCount - 1) {
                    // the directory has one fewer entry than there are leaves
                    final int lastrposd = Math.min(lastrposc, leafCount - 1);
                    System.arraycopy(directoryValues, rposc, directoryValues, wpos, lastrposd - rposc);
                }
            }
        }
        Arrays.fill(leafValues, leafCount - removed, leafCount, null); // be friendly to our GC
        Arrays.fill(leafCounts, leafCount - removed, leafCount, null);
        Arrays.fill(leafSizes, leafCount - removed, leafCount, 0); // not necessary, but nice for debugging
        leafCount -= removed;
        maybePromoteLastLeaf();
    }
private void maybePromoteLastLeaf() {
if (leafCount == 1) {
directoryValues = leafValues[0];
directoryCount = leafCounts[0];
leafValues = null;
leafCounts = null;
leafSizes = null;
if (directoryValues.length > size * 2) {
directoryValues = Arrays.copyOf(directoryValues, size);
directoryCount = Arrays.copyOf(directoryCount, size);
}
}
}
    /**
     * Remove values (with multiplicities) from a single leaf, compacting out slots whose
     * count drops to zero.
     *
     * @param removeContext scratch space for recording slot ranges that hit a zero count
     * @param valuesToRemove sorted chunk of values being removed
     * @param counts multiplicities to remove, parallel to valuesToRemove
     * @param ripos first index in valuesToRemove to process
     * @param end one past the last index in valuesToRemove to consider
     * @param leafValues this leaf's values array
     * @param leafCounts this leaf's counts array
     * @param sz in/out: the leaf's occupied size, updated after compaction
     * @return the index into valuesToRemove where processing should resume (the first value
     *         that does not belong to this leaf)
     */
    private int removeFromLeaf(RemoveContext removeContext, IntChunk<? extends Values> valuesToRemove, IntChunk<ChunkLengths> counts, int ripos, int end, int[] leafValues, long[] leafCounts, MutableInt sz) {
        int rlpos = 0;
        int cl = -1;
        while (ripos < end) {
            final int removeValue = valuesToRemove.get(ripos);
            // advance to the slot for removeValue; past-the-end means it is not in this leaf
            rlpos = upperBound(leafValues, rlpos, sz.intValue(), removeValue);
            if (rlpos == sz.intValue()) {
                break;
            }
            leafCounts[rlpos] -= counts.get(ripos);
            Assert.geqZero(leafCounts[rlpos], "leafCounts[rlpos]");
            if (leafCounts[rlpos] == 0) {
                maybeAccumulateRemoval(removeValue);
                // we need to do some compaction at the end of this iteration
                if (cl == -1) {
                    removeContext.compactionLocations[cl = 0] = rlpos;
                    removeContext.compactionLengths[cl] = 1;
                } else {
                    final int nextCompact = removeContext.compactionLocations[cl] + removeContext.compactionLengths[cl];
                    if (nextCompact == rlpos) {
                        removeContext.compactionLengths[cl]++;
                    } else {
                        removeContext.compactionLocations[++cl] = rlpos;
                        removeContext.compactionLengths[cl] = 1;
                    }
                }
            }
            ripos++;
            // sanity check: the recorded ranges must remain disjoint and non-adjacent
            for (int cli = 0; cli < cl; ++cli) {
                if (removeContext.compactionLocations[cli] + removeContext.compactionLengths[cli] == removeContext.compactionLocations[cli + 1]) {
                    throw new IllegalStateException();
                }
            }
        }
        if (cl == 0 && removeContext.compactionLengths[0] == sz.intValue()) {
            // we've removed everything, so no need to compact
            sz.setValue(0);
            return ripos;
        }
        final int removed = compactValues(removeContext, leafValues, leafCounts, sz.intValue(), cl);
        sz.subtract(removed);
        return ripos;
    }
    /**
     * Slide surviving values/counts left over the zero-count slot ranges recorded in
     * {@code removeContext.compactionLocations} / {@code compactionLengths}.
     *
     * @param cl index of the last valid range in the context (inclusive)
     * @return the number of slots removed from the leaf
     */
    private int compactValues(RemoveContext removeContext, int[] leafValues, long[] leafCounts, int sz, int cl) {
        int removed = 0;
        for (int cli = 0; cli <= cl; cli++) {
            final int removeSize = removeContext.compactionLengths[cli];
            // rpos: first surviving slot after this removed range; wpos: where it lands
            final int rpos = removeContext.compactionLocations[cli] + removeSize;
            final int wpos = removeContext.compactionLocations[cli] - removed;
            removed += removeSize;
            if (rpos <= sz) {
                // we are not removing everything, so have to copy
                final int lastrpos;
                if (cli < cl) {
                    lastrpos = removeContext.compactionLocations[cli + 1];
                } else {
                    lastrpos = sz;
                }
                System.arraycopy(leafValues, rpos, leafValues, wpos, lastrpos - rpos);
                System.arraycopy(leafCounts, rpos, leafCounts, wpos, lastrpos - rpos);
            }
        }
        return removed;
    }
//endregion
//region Validation
@VisibleForTesting
public void validate() {
if (!SEGMENTED_SORTED_MULTISET_VALIDATION) {
return;
}
validateInternal();
}
private void validateInputs(IntChunk<? extends Values> valuesToInsert, IntChunk<ChunkLengths> counts) {
if (!SEGMENTED_SORTED_MULTISET_VALIDATION) {
return;
}
Assert.eq(valuesToInsert.size(), "valuesToInsert.size()", counts.size(), "counts.size()");
if (counts.size() > 0) {
Assert.gtZero(counts.get(0), "counts.get(ii)");
}
for (int ii = 1; ii < valuesToInsert.size(); ++ii) {
Assert.gtZero(counts.get(ii), "counts.get(ii)");
final int prevValue = valuesToInsert.get(ii - 1);
final int curValue = valuesToInsert.get(ii);
Assert.assertion(IntComparisons.lt(prevValue, curValue), "IntComparisons.lt(prevValue, curValue)", prevValue, "prevValue", curValue, "curValue");
}
}
    /**
     * Exhaustively check the structural invariants: cached sizes agree with recomputed
     * sizes, the storage representation matches leafCount (empty / directory / multi-leaf),
     * every leaf is sorted with positive counts, and directory entries bound their leaves.
     */
    private void validateInternal() {
        Assert.geqZero(size, "size");
        Assert.geqZero(totalSize, "totalSize");
        if (size == 0) {
            Assert.eqZero(leafCount, "leafCount");
        } else {
            Assert.gtZero(leafCount, "leafCount");
        }
        Assert.geq(totalSize, "totalSize", size, "size");
        if (leafCount == 0) {
            // empty form: no storage allocated at all
            Assert.eqNull(leafValues, "leafValues");
            Assert.eqNull(leafCounts, "leafValues");
            Assert.eqNull(leafSizes, "leafSizes");
            Assert.eqNull(directoryCount, "directoryIndex");
            Assert.eqNull(directoryValues, "directoryValues");
        } else if (leafCount == 1) {
            // directory ("flat") form: values/counts live directly in the directory arrays
            Assert.eqNull(leafValues, "leafValues");
            Assert.eqNull(leafCounts, "leafValues");
            Assert.eqNull(leafSizes, "leafSizes");
            Assert.neqNull(directoryCount, "directoryIndex");
            Assert.neqNull(directoryValues, "directoryValues");
            Assert.geq(directoryCount.length, "directoryIndex.length", size, "size");
            Assert.geq(directoryValues.length, "directoryValues.length", size, "size");
            Assert.leq(directoryCount.length, "directoryIndex.length", leafSize, "leafSize");
            Assert.leq(directoryValues.length, "directoryValues.length", leafSize, "leafSize");
            validateLeaf(directoryValues, directoryCount, size);
            long totalCounts = 0;
            for (int ii = 0; ii < size; ++ii) {
                totalCounts += directoryCount[ii];
            }
            Assert.eq(totalCounts, "totalCounts", totalSize, "totalSize");
        } else {
            // multi-leaf form: directoryValues[ii] is an upper bound for leaf ii's values
            Assert.neqNull(leafValues, "leafValues");
            Assert.neqNull(leafCounts, "leafValues");
            Assert.neqNull(leafSizes, "leafSizes");
            Assert.eqNull(directoryCount, "directoryIndex");
            Assert.neqNull(directoryValues, "directoryValues");
            Assert.geq(directoryValues.length, "directoryValues.length", leafCount - 1, "leafCount - 1");
            Assert.geq(leafSizes.length, "directoryValues.length", leafCount, "leafCount");
            Assert.geq(leafValues.length, "directoryValues.length", leafCount, "leafCount");
            Assert.geq(leafCounts.length, "directoryValues.length", leafCount, "leafCount");
            Assert.eq(computeLeafSizes(), "computeLeafSizes()", size, "size");
            Assert.eq(computeTotalSize(), "computeTotalSize()", totalSize, "totalSize");
            for (int ii = 0; ii < leafCount; ++ii) {
                validateLeaf(ii);
                final int lastValue = leafValues[ii][leafSizes[ii] - 1];
                if (ii < leafCount - 1) {
                    final int directoryValue = directoryValues[ii];
                    // NOTE(review): the assertion message says "lt" but the check performed is leq
                    Assert.assertion(leq(lastValue, directoryValue), "lt(lastValue, directoryValue)", lastValue, "leafValues[ii][leafSizes[ii] - 1]", directoryValue, "directoryValue");
                    if (ii < leafCount - 2) {
                        final int nextDirectoryValue = directoryValues[ii + 1];
                        Assert.assertion(lt(directoryValue, nextDirectoryValue), "lt(directoryValue, nextDirectoryValue)", directoryValue, "directoryValue", nextDirectoryValue, "nextDirectoryValue");
                    }
                    final int nextFirstValue = leafValues[ii + 1][0];
                    Assert.assertion(lt(directoryValue, nextFirstValue), "lt(directoryValue, nextFirstValue)", directoryValue, "directoryValue", nextFirstValue, "nextFirstValue");
                }
                // It would be nice to enable an assertion to make sure we are dense after removals, but the other
                // reason this assertion can fail is that if we insert into a node that is too large we may have to
                // split it. The last node we have could be short, and it might be possible to merge it with the node
                // afterwards, but we don't do removals during an insertion phase.
                // if (ii < leafCount - 1) {
                //     final int thisLeafSize = leafSizes[ii];
                //     final int nextLeafSize = leafSizes[ii + 1];
                //     Assert.leq(leafSize, "leafSize", thisLeafSize + nextLeafSize, "thisLeafSize + nextLeafSize");
                // }
            }
            validateLeafOrdering();
        }
    }
private void validateLeafOrdering() {
for (int leaf = 0; leaf < leafCount - 1; ++leaf) {
final int lastValue = leafValues[leaf][leafSizes[leaf] - 1];
final int nextValue = leafValues[leaf + 1][0];
Assert.assertion(lt(lastValue, nextValue), lastValue + " < " + nextValue);
}
}
private void validateLeaf(int leaf) {
Assert.eq(leafValues[leaf].length, "leafValues[leaf].length", leafSize);
Assert.eq(leafCounts[leaf].length, "leafCounts[leaf].length", leafSize);
validateLeaf(leafValues[leaf], leafCounts[leaf], leafSizes[leaf]);
}
    // Validate one leaf (or the directory arrays): values strictly ascending and every
    // occupied slot's count strictly positive.
    private static void validateLeaf(int[] values, long[] counts, int size) {
        Assert.gtZero(size, "size");
        for (int ii = 0; ii < size - 1; ++ii) {
            Assert.gtZero(counts[ii], "counts[ii]");
            final int thisValue = values[ii];
            final int nextValue = values[ii + 1];
            Assert.assertion(lt(values[ii], values[ii + 1]), "lt(values[ii], values[ii + 1])", (Integer)thisValue, "values[ii]", (Integer)nextValue, "values[ii + 1]", ii, "ii");
        }
        // the loop stops before the last slot, so its count is checked separately here
        // (NOTE(review): size > 0 always holds after the gtZero assert above, so this
        // guard is redundant but harmless)
        if (size > 0) {
            Assert.gtZero(counts[size - 1], "counts[size - 1]");
        }
    }
private int computeLeafSizes() {
int expectedSize = 0;
for (int ii = 0; ii < leafCount; ++ii) {
expectedSize += leafSizes[ii];
}
return expectedSize;
}
private int computeTotalSize() {
int expectedSize = 0;
for (int ii = 0; ii < leafCount; ++ii) {
for (int jj = 0; jj < leafSizes[ii]; ++jj) {
expectedSize += leafCounts[ii][jj];
}
}
return expectedSize;
}
//endregion
//region Comparisons
    // Ceiling division: number of leaves of capacity leafSize needed to hold newSize values.
    private int getDesiredLeafCount(int newSize) {
        return (newSize + leafSize - 1) / leafSize;
    }
    // Ceiling division: values per leaf when spreading 'values' evenly across 'leafCount' leaves.
    private static int valuesPerLeaf(int values, int leafCount) {
        return (values + leafCount - 1) / leafCount;
    }
    // Central comparison used by the ordering helpers below; delegates to
    // IntComparisons.compare.
    private static int doComparison(int lhs, int rhs) {
        return IntComparisons.compare(lhs, rhs);
    }
    // lhs > rhs under doComparison ordering
    private static boolean gt(int lhs, int rhs) {
        return doComparison(lhs, rhs) > 0;
    }
    // lhs < rhs under doComparison ordering
    private static boolean lt(int lhs, int rhs) {
        return doComparison(lhs, rhs) < 0;
    }
    // lhs <= rhs under doComparison ordering
    private static boolean leq(int lhs, int rhs) {
        return doComparison(lhs, rhs) <= 0;
    }
    // lhs >= rhs under doComparison ordering
    private static boolean geq(int lhs, int rhs) {
        return doComparison(lhs, rhs) >= 0;
    }
    // Equality check; the region markers below are replication anchors for code generation.
    private static boolean eq(int lhs, int rhs) {
        // region equality function
        return lhs == rhs;
        // endregion equality function
    }
//endregion
    // Total number of values held, counting multiplicity.
    @Override
    public long totalSize() { return totalSize; }
    // Capacity (in slots) of each leaf node.
    @Override
    public int getNodeSize() {
        return leafSize;
    }
    // Boxed minimum value; see getMinInt() for the primitive accessor.
    @Override
    public Integer getMin() {
        return getMinInt();
    }
    // Boxed maximum value; see getMaxInt() for the primitive accessor.
    @Override
    public Integer getMax() {
        return getMaxInt();
    }
public int getMinInt() {
if (leafCount == 0) {
throw new IllegalStateException();
}
else if (leafCount == 1) {
return directoryValues[0];
}
return leafValues[0][0];
}
@Override
public long getMinCount() {
if (leafCount == 0) {
throw new IllegalStateException();
}
else if (leafCount == 1) {
return directoryCount[0];
}
return leafCounts[0][0];
}
private void addMinCount(long toAdd) {
if (leafCount == 0) {
throw new IllegalStateException();
}
else if (leafCount == 1) {
directoryCount[0] += toAdd;
} else {
leafCounts[0][0] += toAdd;
}
totalSize += toAdd;
}
    // Remove the smallest value entirely (its full multiplicity), shifting remaining
    // values left and dropping the first leaf if it becomes empty.
    private void removeMin() {
        if (size == 1) {
            // removing the only value empties the whole structure
            clear();
            return;
        }
        if (leafCount == 1) {
            // directory form: shift everything left by one slot
            totalSize -= directoryCount[0];
            System.arraycopy(directoryValues, 1, directoryValues, 0, size - 1);
            System.arraycopy(directoryCount, 1, directoryCount, 0, size - 1);
        } else {
            // multi-leaf form: shift the first leaf left by one slot
            totalSize -= leafCounts[0][0];
            System.arraycopy(leafValues[0], 1, leafValues[0], 0, leafSizes[0] - 1);
            System.arraycopy(leafCounts[0], 1, leafCounts[0], 0, leafSizes[0] - 1);
            leafSizes[0]--;
            if (leafSizes[0] == 0) {
                // we need to remove this leaf
                leafCount--;
                System.arraycopy(leafValues, 1, leafValues, 0, leafCount);
                System.arraycopy(leafCounts, 1, leafCounts, 0, leafCount);
                System.arraycopy(leafSizes, 1, leafSizes, 0, leafCount);
                System.arraycopy(directoryValues, 1, directoryValues, 0, leafCount - 1);
                maybePromoteLastLeaf();
            }
        }
        size--;
    }
public int getMaxInt() {
if (leafCount == 0) {
throw new IllegalStateException();
}
else if (leafCount == 1) {
return directoryValues[size - 1];
}
return leafValues[leafCount - 1][leafSizes[leafCount - 1] - 1];
}
@Override
public long getMaxCount() {
if (leafCount == 0) {
throw new IllegalStateException();
}
else if (leafCount == 1) {
return directoryCount[size - 1];
}
return leafCounts[leafCount - 1][leafSizes[leafCount - 1] - 1];
}
private void addMaxCount(long toAdd) {
if (leafCount == 0) {
throw new IllegalStateException();
}
else if (leafCount == 1) {
directoryCount[size - 1] += toAdd;
} else {
leafCounts[leafCount - 1][leafSizes[leafCount - 1] - 1] += toAdd;
}
totalSize += toAdd;
}
    // Remove the largest value entirely (its full multiplicity), dropping the last leaf
    // if it becomes empty.
    private void removeMax() {
        if (size == 1) {
            // removing the only value empties the whole structure
            clear();
            return;
        }
        if (leafCount > 1) {
            // multi-leaf form: shrink the last leaf by one slot
            totalSize -= leafCounts[leafCount - 1][leafSizes[leafCount - 1] - 1];
            leafSizes[leafCount - 1]--;
            size--;
            if (leafSizes[leafCount - 1] == 0) {
                leafCount--;
                maybePromoteLastLeaf();
            }
        } else {
            // directory form: shrink the occupied size
            totalSize -= directoryCount[size - 1];
            size--;
        }
    }
//region Moving
    /**
     * Move the smallest {@code count} values (counting multiplicity) from this SSM onto the
     * back of {@code untypedDestination}. The destination's current maximum must be less
     * than or equal to this SSM's minimum so that the move preserves ordering in both sets.
     *
     * @param untypedDestination the receiving SSM (must be an IntSegmentedSortedMultiset
     *        with the same leaf size)
     * @param count how many values (with multiplicity) to move
     */
    @Override
    public void moveFrontToBack(SegmentedSortedMultiSet untypedDestination, long count) {
        final IntSegmentedSortedMultiset destination = (IntSegmentedSortedMultiset)untypedDestination;
        validate();
        destination.validate();
        Assert.eq(leafSize, "leafSize", destination.leafSize, "destination.leafSize");
        Assert.gtZero(leafCount, "leafCount");
        if (count == 0) {
            return;
        }
        if (SEGMENTED_SORTED_MULTISET_VALIDATION) {
            if (destination.size > 0) {
                Assert.assertion(geq(getMinInt(), destination.getMaxInt()), "geq(getMinInt(), destination.getMaxInt())");
            }
        }
        // If our minimum equals the destination's maximum, transfer multiplicity directly
        // between the two shared slots before moving whole slots.
        if (destination.size > 0 && eq(getMinInt(), destination.getMaxInt())) {
            final long minCount = getMinCount();
            final long toAdd;
            if (minCount > count) {
                toAdd = count;
                addMinCount(-count);
            } else {
                toAdd = minCount;
                removeMin();
            }
            destination.addMaxCount(toAdd);
            count -= toAdd;
        }
        if (count == 0) {
            validate();
            destination.validate();
            return;
        }
        // Figure out how many unique slots must move: rleaf whole leaves plus
        // partialUnique slots of a final, partially-moved leaf. leftOver is the portion of
        // the last moved slot's count that stays behind.
        final MutableInt remaining = new MutableInt(count);
        final MutableInt leftOverMutable = new MutableInt();
        int totalUniqueToMove = 0;
        int partialUnique = 0;
        int rleaf = 0;
        if (leafCount == 1) {
            // we need to move this many entries (the last one may be partial)
            totalUniqueToMove = countFront(directoryCount, size, remaining, leftOverMutable);
            if (remaining.intValue() > 0) {
                throw new IllegalStateException();
            }
            if (totalUniqueToMove == size) {
                partialUnique = 0;
                rleaf = 1;
            } else {
                partialUnique = totalUniqueToMove;
            }
        } else {
            while (remaining.intValue() > 0) {
                final int uniqueToMove = countFront(leafCounts[rleaf], leafSizes[rleaf], remaining, leftOverMutable);
                totalUniqueToMove += uniqueToMove;
                if (uniqueToMove == leafSizes[rleaf]) {
                    rleaf++;
                } else {
                    partialUnique = uniqueToMove;
                }
            }
        }
        final boolean appendToExtra = destination.prepareAppend(partialUnique, rleaf);
        final int leftOver = leftOverMutable.intValue();
        // Phase 1: transfer whole leaves (or the whole directory) to the destination.
        if (rleaf > 0) {
            int wleaf = destination.leafCount;
            // we can move full leaves to start
            if (leafCount == 1) {
                Assert.eqZero(partialUnique, "partialUnique");
                if (wleaf > 0) {
                    destination.updateDirectory(wleaf - 1);
                }
                destination.leafValues[wleaf] = Arrays.copyOf(directoryValues, leafSize);
                destination.leafCounts[wleaf] = Arrays.copyOf(directoryCount, leafSize);
                destination.leafSizes[wleaf] = size;
                destination.size += size;
                if (leftOver > 0) {
                    // keep a single slot behind with the left-over portion of the count
                    directoryCount[0] = leftOver;
                    directoryValues[0] = directoryValues[size - 1];
                    destination.leafCounts[wleaf][destination.leafSizes[wleaf] - 1] -= leftOver;
                    Assert.gtZero(destination.leafCounts[wleaf][destination.leafSizes[wleaf] - 1], "destination.leafCounts[wleaf][destination.leafSizes[wleaf] - 1]");
                    size = 1;
                } else {
                    directoryValues = null;
                    directoryCount = null;
                    size = 0;
                }
                if (wleaf > 0 && destination.leafSizes[wleaf] + destination.leafSizes[wleaf - 1] <= leafSize) {
                    destination.mergeTwoLeavesBack(wleaf - 1, wleaf);
                } else {
                    wleaf++;
                }
                // we don't want to do the final copy
                rleaf = 0;
            } else {
                for (int rli = 0; rli < rleaf; ++rli) {
                    destination.leafValues[wleaf] = leafValues[rli];
                    destination.leafCounts[wleaf] = leafCounts[rli];
                    if (wleaf > 0) {
                        destination.updateDirectory(wleaf - 1);
                    }
                    destination.leafSizes[wleaf] = leafSizes[rli];
                    wleaf++;
                    destination.size += leafSizes[rli];
                    if (rli == rleaf - 1 && leftOver > 0 && partialUnique == 0) {
                        // the last moved leaf's final slot is split: keep the left-over
                        // portion behind in a fresh single-slot leaf
                        final int sizeOfLeftOverLeaf = leafSizes[rli];
                        size -= (sizeOfLeftOverLeaf - 1);
                        final int [] tmpValues = new int[leafSize];
                        final long [] tmpCounts = new long[leafSize];
                        tmpValues[0] = leafValues[rli][sizeOfLeftOverLeaf - 1];
                        tmpCounts[0] = leftOver;
                        leafValues[rli] = tmpValues;
                        leafCounts[rli] = tmpCounts;
                        leafSizes[rli] = 1;
                        destination.leafCounts[wleaf - 1][sizeOfLeftOverLeaf - 1] -= leftOver;
                        Assert.gtZero(destination.leafCounts[wleaf - 1][sizeOfLeftOverLeaf - 1], "destination.leafCounts[wleaf - 1][sizeOfLeftOverLeaf - 1]");
                        if (rli < leafCount - 1) {
                            updateDirectory(rli);
                        }
                    } else {
                        size -= leafSizes[rli];
                        leafSizes[rli] = 0;
                        leafValues[rli] = null;
                        leafCounts[rli] = null;
                    }
                    // if we can actually fit the last two leaves within a single leaf, take advantage of it
                    if (wleaf > 1 && destination.leafSizes[wleaf - 1] + destination.leafSizes[wleaf - 2] <= leafSize) {
                        destination.mergeTwoLeavesBack(wleaf - 2, wleaf - 1);
                        wleaf--;
                    }
                }
                if (destination.directoryValues.length >= wleaf) {
                    destination.updateDirectory(wleaf - 1);
                }
            }
            destination.leafCount = wleaf;
            if (partialUnique == 0) {
                destination.maybePromoteLastLeaf();
            }
        }
        // Phase 2: copy the partial prefix of the next leaf (or directory) to the
        // destination, then compact that leaf's remainder down to the front.
        boolean sourceLeavesMerged = false;
        if (partialUnique > 0) {
            final int [] sourceValues;
            final long [] sourceCounts;
            final int [] destinationValues;
            final long [] destinationCounts;
            final int copySize;
            final int destOffset;
            if (leafCount == 1) {
                sourceValues = directoryValues;
                sourceCounts = directoryCount;
                copySize = size;
            } else {
                sourceValues = leafValues[rleaf];
                sourceCounts = leafCounts[rleaf];
                copySize = leafSizes[rleaf];
            }
            assert sourceValues != null;
            assert sourceCounts != null;
            final int wleaf;
            if (appendToExtra) {
                // prepareAppend allocated an extra leaf slot for the partial values
                wleaf = destination.leafCount;
                destinationValues = destination.leafValues[wleaf] = new int[leafSize];
                destinationCounts = destination.leafCounts[wleaf] = new long[leafSize];
                destOffset = 0;
                destination.leafSizes[wleaf] = partialUnique;
                destination.leafCount++;
            } else {
                if (destination.directoryCount == null) {
                    // append into the destination's existing last leaf
                    wleaf = destination.leafCount - 1;
                    destOffset = destination.leafSizes[wleaf];
                    destinationValues = destination.leafValues[wleaf];
                    destinationCounts = destination.leafCounts[wleaf];
                    destination.leafSizes[wleaf] += partialUnique;
                } else {
                    // destination is in directory form; wleaf == -1 marks that case below
                    wleaf = -1;
                    destOffset = destination.size;
                    destinationValues = destination.directoryValues;
                    destinationCounts = destination.directoryCount;
                    destination.leafCount = 1;
                }
            }
            // copy from the final leaf to copy into the result
            System.arraycopy(sourceValues, 0, destinationValues, destOffset, partialUnique);
            System.arraycopy(sourceCounts, 0, destinationCounts, destOffset, partialUnique);
            // we are always in the last leaf, so no directory values to fix up
            destination.size += partialUnique;
            if (leftOver > 0) {
                if (destination.directoryCount == null) {
                    destination.leafCounts[wleaf][destOffset + partialUnique - 1] -= leftOver;
                    Assert.gtZero(destination.leafCounts[wleaf][destOffset + partialUnique - 1], "destination.leafCounts[wleaf][destOffset + partialUnique - 1]");
                } else {
                    destination.directoryCount[destination.size - 1] -= leftOver;
                    Assert.gtZero(destination.directoryCount[destination.size - 1], "destination.directoryCount[destination.size]");
                }
            }
            if (wleaf > destination.leafCount - 1) {
                destination.updateDirectory(wleaf);
            }
            final int leftOverSlot = leftOver > 0 ? 1 : 0;
            // compact the remaining leaf
            final int compactCopySize = copySize - partialUnique + leftOverSlot;
            System.arraycopy(sourceValues, partialUnique - leftOverSlot, sourceValues, 0, compactCopySize);
            System.arraycopy(sourceCounts, partialUnique - leftOverSlot, sourceCounts, 0, compactCopySize);
            if (leftOver > 0) {
                sourceCounts[0] = leftOver;
            }
            final int sizeChange = partialUnique - leftOverSlot;
            size -= sizeChange;
            if (leafCount > 1) {
                leafSizes[rleaf] -= sizeChange;
            }
            // possibly merge source
            if (rleaf < leafCount - 1 && leafSizes[rleaf] + leafSizes[rleaf + 1] < leafSize) {
                mergeTwoLeavesForward(rleaf, rleaf + 1);
                sourceLeavesMerged = true;
            }
            // if we can actually fit the last two leaves within a single leaf, take advantage of it
            if (wleaf >= 1 && destination.leafSizes[wleaf] + destination.leafSizes[wleaf - 1] <= leafSize) {
                destination.mergeTwoLeavesBack(wleaf - 1, wleaf);
                destination.leafCount--;
                destination.maybePromoteLastLeaf();
            }
        }
        // Phase 3: slide surviving source leaves down over the ones that were moved away.
        if ((sourceLeavesMerged || rleaf > 0) && size > 0) {
            final int copyStart;
            final int copyCount;
            if (partialUnique == 0 && leftOver > 0) {
                // we have to deal with the leaf that still contains leftovers
                copyStart = rleaf - 1;
            } else {
                // we have to deal with the leaf that still contains leftovers
                final int mergedLeafCount = sourceLeavesMerged ? 1 : 0;
                copyStart = rleaf + mergedLeafCount;
            }
            copyCount = leafCount - copyStart;
            if (copyCount > 0) {
                System.arraycopy(leafValues, copyStart, leafValues, 0, copyCount);
                System.arraycopy(leafCounts, copyStart, leafCounts, 0, copyCount);
                System.arraycopy(leafSizes, copyStart, leafSizes, 0, copyCount);
                if (copyCount > 1) {
                    System.arraycopy(directoryValues, copyStart, directoryValues, 0, copyCount - 1);
                }
            }
            leafCount -= copyStart;
            maybePromoteLastLeaf();
        }
        totalSize -= count;
        destination.totalSize += count;
        if (size == 0) {
            clear();
        }
        if (SEGMENTED_SORTED_MULTISET_VALIDATION) {
            if (size > 0 && destination.size > 0) {
                Assert.assertion(geq(getMinInt(), destination.getMaxInt()), "geq(getMinInt(), destination.getMaxInt())");
            }
        }
        validate();
        destination.validate();
    }
private void updateDirectory(int leaf) {
directoryValues[leaf] = leafValues[leaf][leafSizes[leaf] - 1];
}
    /**
     * Prepare this SSM's storage for appending values at the back.
     *
     * @param finalSlots how many slots outside of completeLeaves are required
     * @param completeLeaves how many complete leaves are required
     *
     * @return true if we should put our finalSlots values in the "extra" leaf. False if they should be appended to the last leaf that already exists
     */
    private boolean prepareAppend(int finalSlots, int completeLeaves) {
        Assert.leq(finalSlots, "finalSlots", leafSize, "leafSize");
        if (completeLeaves == 0) {
            // we are only going to append to the last leaf
            if (leafCount == 0) {
                // empty: allocate directory arrays sized exactly for the incoming values
                directoryValues = new int[finalSlots];
                directoryCount = new long[finalSlots];
                return false;
            } else if (leafCount == 1) {
                if (size + finalSlots <= leafSize) {
                    // still fits in directory form; grow the arrays in place
                    directoryValues = Arrays.copyOf(directoryValues, finalSlots + size);
                    directoryCount = Arrays.copyOf(directoryCount, finalSlots + size);
                    return false;
                }
                // overflow: convert to leaf form with room for a second leaf
                moveDirectoryToLeaf(2);
                updateDirectory(0);
                return true;
            }
            if (finalSlots + leafSizes[leafCount - 1] > leafSize) {
                // last leaf cannot absorb the new values; make room for a fresh leaf
                reallocateLeafArrays(leafCount + 1);
                updateDirectory(leafCount - 1);
                return true;
            }
            return false;
        } else {
            // we are going to add leaves, then a final partial leaf
            final boolean extraLeaf = finalSlots > 0;
            final int extraLeafCount = extraLeaf ? 1 : 0;
            if (leafCount == 0) {
                allocateLeafArrays(completeLeaves + extraLeafCount);
            } else if (leafCount == 1) {
                moveDirectoryToLeaf(1 + completeLeaves + extraLeafCount);
            } else {
                reallocateLeafArrays(leafCount + completeLeaves + extraLeafCount);
            }
            if (extraLeaf) {
                // pre-allocate backing arrays for the trailing partial leaf
                leafValues[leafCount + completeLeaves] = new int[leafSize];
                leafCounts[leafCount + completeLeaves] = new long[leafSize];
            }
            return extraLeaf;
        }
    }
    /**
     * Prepare the SSM for prepending values to it.
     *
     * @param initialSlots how many slots outside of a complete leaf will be prepended
     * @param completeLeaves how many complete leaves will be prepended
     *
     * @return true if the initialSlots values should be copied into their own private leaf, false if they should share space
     * with the next leaf
     */
    private boolean preparePrepend(int initialSlots, int completeLeaves) {
        final int extraLeafCount;
        if (completeLeaves > 0) {
            final boolean extraLeaf = initialSlots > 0;
            extraLeafCount = extraLeaf ? 1 : 0;
            if (leafCount == 0) {
                allocateLeafArrays(completeLeaves + extraLeafCount);
            } else if (leafCount == 1) {
                // convert directory form to leaves, leaving a hole at the front
                moveDirectoryToLeaf(completeLeaves + 1 + extraLeafCount, completeLeaves + extraLeafCount);
            } else {
                reallocateLeafArrays(leafCount + completeLeaves + extraLeafCount);
                makeLeafHole(0, completeLeaves + extraLeafCount);
            }
        } else {
            // we only have the partial leaf
            Assert.gtZero(initialSlots, "initialSlots");
            if (leafCount == 0) {
                Assert.leq(initialSlots, "initialSlots", leafSize, "leafSize");
                extraLeafCount = 1;
                directoryValues = new int[initialSlots];
                directoryCount = new long[initialSlots];
            } else if (leafCount == 1) {
                // a separate leaf is only needed if the combined values overflow one leaf
                final boolean extraLeaf = initialSlots + size > leafSize;
                extraLeafCount = extraLeaf ? 1 : 0;
                if (extraLeaf) {
                    moveDirectoryToLeaf(2, 1);
                }
            } else {
                final boolean extraLeaf = initialSlots + leafSizes[0] > leafSize;
                extraLeafCount = extraLeaf ? 1 : 0;
                if (extraLeaf) {
                    makeLeafHole(0, 1);
                }
            }
        }
        final int targetLeaf = completeLeaves + extraLeafCount;
        leafCount += targetLeaf;
        if (extraLeafCount == 0 && initialSlots > 0) {
            // make a hole in the first leaf that still has values
            if (directoryCount != null) {
                final int [] tmpValues = new int[initialSlots + size];
                final long [] tmpCount = new long[initialSlots + size];
                System.arraycopy(directoryValues, 0, tmpValues, initialSlots, size);
                System.arraycopy(directoryCount, 0, tmpCount, initialSlots, size);
                directoryValues = tmpValues;
                directoryCount = tmpCount;
            } else {
                final int copySize = leafSizes[targetLeaf];
                System.arraycopy(leafValues[targetLeaf], 0, leafValues[targetLeaf], initialSlots, copySize);
                System.arraycopy(leafCounts[targetLeaf], 0, leafCounts[targetLeaf], initialSlots, copySize);
            }
        }
        return extraLeafCount > 0;
    }
private static int countFront(long[] counts, int sz, MutableInt valuesToMove, MutableInt leftOvers) {
leftOvers.setValue(0);
int rpos = 0;
// figure out how many values we must move
while (valuesToMove.intValue() > 0 && rpos < sz) {
final long slotCount = counts[rpos];
if (valuesToMove.intValue() < slotCount) {
leftOvers.setValue(slotCount - valuesToMove.intValue());
valuesToMove.setValue(0);
} else {
valuesToMove.subtract(slotCount);
}
rpos++;
}
return rpos;
}
@Override
public void moveBackToFront(SegmentedSortedMultiSet untypedDestination, long count) {
final IntSegmentedSortedMultiset destination = (IntSegmentedSortedMultiset)untypedDestination;
validate();
destination.validate();
if (count == 0) {
return;
}
Assert.eq(leafSize, "leafSize", destination.leafSize, "destination.leafSize");
Assert.gtZero(leafCount, "leafCount");
if (SEGMENTED_SORTED_MULTISET_VALIDATION) {
if (destination.size > 0) {
Assert.assertion(leq(getMaxInt(), destination.getMinInt()), "leq(getMaxInt(), destination.getMinInt())");
}
}
if (destination.size > 0 && eq(getMaxInt(), destination.getMinInt())) {
final long maxCount = getMaxCount();
final long toAdd;
if (maxCount > count) {
toAdd = count;
addMaxCount(-count);
} else {
toAdd = maxCount;
removeMax();
}
destination.addMinCount(toAdd);
count -= toAdd;
}
if (count == 0) {
return;
}
final MutableInt remaining = new MutableInt(count);
final MutableInt leftOverMutable = new MutableInt();
int totalUniqueToMove = 0;
int slotsInPartialLeaf = 0;
int completeLeavesToMove = 0;
int rleaf = leafCount - 1;
if (leafCount == 1) {
// we need to move this many entries (the last one may be partial)
totalUniqueToMove = countBack(directoryCount, size, remaining, leftOverMutable);
Assert.eqZero(remaining.intValue(), "remaining.intValue()");
Assert.leq(totalUniqueToMove, "totalUniqueToMove", count, "count");
if (totalUniqueToMove == size && remaining.intValue() == 0) {
// we are moving the entire leaf
completeLeavesToMove = 1;
slotsInPartialLeaf = 0;
} else {
completeLeavesToMove = 0;
slotsInPartialLeaf = totalUniqueToMove;
}
} else {
while (remaining.intValue() > 0) {
final int uniqueToMove = countBack(leafCounts[rleaf], leafSizes[rleaf], remaining, leftOverMutable);
Assert.leq(totalUniqueToMove, "totalUniqueToMove", count, "count");
totalUniqueToMove += uniqueToMove;
if (uniqueToMove == leafSizes[rleaf]) {
rleaf--;
completeLeavesToMove++;
} else {
slotsInPartialLeaf = uniqueToMove;
}
}
}
final int leftOver = leftOverMutable.intValue();
final boolean extraLeaf = destination.preparePrepend(slotsInPartialLeaf, completeLeavesToMove);
if (slotsInPartialLeaf > 0) {
final boolean leftOverExists = leftOver > 0;
final int [] destValues;
final long [] destCounts;
final int [] srcValues;
final long [] srcCounts;
final int srcSize;
if (destination.directoryCount != null) {
destValues = destination.directoryValues;
destCounts = destination.directoryCount;
} else {
if (extraLeaf) {
destination.leafValues[0] = new int[leafSize];
destination.leafCounts[0] = new long[leafSize];
}
destValues = destination.leafValues[0];
destCounts = destination.leafCounts[0];
}
if (leafCount == 1) {
srcValues = directoryValues;
srcCounts = directoryCount;
srcSize = size;
} else {
srcValues = leafValues[rleaf];
srcCounts = leafCounts[rleaf];
srcSize = leafSizes[rleaf];
}
final int srcOffset = srcSize - slotsInPartialLeaf;
System.arraycopy(srcValues, srcOffset, destValues, 0, slotsInPartialLeaf);
System.arraycopy(srcCounts, srcOffset, destCounts, 0, slotsInPartialLeaf);
final int sizeChange = slotsInPartialLeaf + (leftOverExists ? -1 : 0);
size -= sizeChange;
destination.size += slotsInPartialLeaf;
if (destination.directoryCount == null) {
destination.leafSizes[0] += slotsInPartialLeaf;
if (destination.leafCount > 1) {
destination.updateDirectory(0);
}
}
if (leafCount > 1) {
leafSizes[rleaf] -= sizeChange;
}
if (leftOverExists) {
destCounts[0] -= leftOver;
srcCounts[srcOffset] = leftOver;
}
}
// now mass move a bunch of leaves over
if (completeLeavesToMove > 0) {
if (leafCount == 1) {
Assert.eqZero(slotsInPartialLeaf, "slotsInPartialLeaf");
destination.leafValues[0] = Arrays.copyOf(directoryValues, leafSize);
destination.leafCounts[0] = Arrays.copyOf(directoryCount, leafSize);
destination.size += size;
destination.leafSizes[0] = size;
if (destination.leafCount > 1) {
destination.updateDirectory(0);
}
if (leftOver > 0) {
destination.leafCounts[0][0] -= leftOver;
directoryCount[0] = leftOver;
size = 1;
} else {
size = 0;
}
} else {
final int destinationLeaf = slotsInPartialLeaf > 0 ? 1 : 0;
System.arraycopy(leafValues, rleaf + 1, destination.leafValues, destinationLeaf, completeLeavesToMove);
System.arraycopy(leafCounts, rleaf + 1, destination.leafCounts, destinationLeaf, completeLeavesToMove);
System.arraycopy(leafSizes, rleaf + 1, destination.leafSizes, destinationLeaf, completeLeavesToMove);
final int directoryMoves;
final boolean haveLastSourceDirectoryEntry = rleaf + 1 + completeLeavesToMove < leafCount - 1;
final boolean requireLastDestinationDirectoryEntry = destination.leafCount > (destinationLeaf + completeLeavesToMove);
if (haveLastSourceDirectoryEntry && requireLastDestinationDirectoryEntry) {
directoryMoves = completeLeavesToMove;
} else {
directoryMoves = completeLeavesToMove - 1;
}
if (directoryMoves > 0) {
System.arraycopy(directoryValues, rleaf + 1, destination.directoryValues, destinationLeaf, directoryMoves);
}
if (requireLastDestinationDirectoryEntry) {
destination.updateDirectory(destinationLeaf + completeLeavesToMove - 1);
}
final boolean hasLeftOverSlot = leftOver > 0 && slotsInPartialLeaf == 0;
if (hasLeftOverSlot) {
// fixup the destination, we must have a destinationLeaf of 0
Assert.eqZero(destinationLeaf, "destinationLeaf");
destination.leafCounts[0][0] -= leftOver;
}
final int numberOfLeavesToRemove = hasLeftOverSlot ? completeLeavesToMove - 1 : completeLeavesToMove;
leafCount -= numberOfLeavesToRemove;
if (hasLeftOverSlot) {
// we need to copy the array, so that it is not aliased to two different nodes
leafCounts[rleaf + 1] = new long[leafSize];
leafValues[rleaf + 1] = new int[leafSize];
leafValues[rleaf + 1][0] = destination.leafValues[0][0];
leafCounts[rleaf + 1][0] = leftOver;
leafSizes[rleaf + 1] = 1;
// we'll take it away in the loop below, so we need to preserve it here
size++;
}
for (int ii = 0; ii < completeLeavesToMove; ++ii) {
size -= destination.leafSizes[destinationLeaf + ii];
destination.size += destination.leafSizes[destinationLeaf + ii];
}
final int firstLeafTozero = hasLeftOverSlot ? rleaf + 2 : rleaf + 1;
Arrays.fill(leafValues, firstLeafTozero, firstLeafTozero + numberOfLeavesToRemove, null);
Arrays.fill(leafCounts, firstLeafTozero, firstLeafTozero + numberOfLeavesToRemove, null);
Arrays.fill(leafSizes, firstLeafTozero , firstLeafTozero + numberOfLeavesToRemove, 0);
if (directoryMoves > 0) {
Arrays.fill(directoryValues, firstLeafTozero, firstLeafTozero + directoryMoves - (completeLeavesToMove - numberOfLeavesToRemove), NULL_INT);
}
maybePromoteLastLeaf();
}
destination.maybePromoteLastLeaf();
}
totalSize -= count;
destination.totalSize += count;
if (size == 0) {
clear();
}
validate();
destination.validate();
if (SEGMENTED_SORTED_MULTISET_VALIDATION) {
if (size > 0 && destination.size > 0) {
Assert.assertion(leq(getMaxInt(), destination.getMinInt()), "leq(getMaxInt(), destination.getMinInt())");
}
}
}
    /**
     * Determine how many slots at the back of a leaf participate in moving {@code valuesToMove}
     * total values (counting multiplicities) out of the leaf.
     *
     * @param counts the per-slot occurrence counts for the leaf
     * @param sz the number of occupied slots in the leaf
     * @param valuesToMove in/out: values still to be moved; decremented by what this leaf supplies
     *                     (reaches 0 when the request is satisfied within this leaf)
     * @param leftOvers out: when the last examined slot is split, the portion of its count that
     *                  stays behind; otherwise 0
     * @return the number of slots, counted from the back of the leaf, that are involved in the move
     */
    private static int countBack(long[] counts, int sz, MutableInt valuesToMove, MutableInt leftOvers) {
        leftOvers.setValue(0);
        int rpos = sz;
        // figure out how many values we must move
        while (valuesToMove.intValue() > 0 && rpos > 0) {
            final long slotCount = counts[--rpos];
            if (valuesToMove.intValue() < slotCount) {
                // this slot is split: part of its count moves, the remainder stays behind
                leftOvers.setValue(slotCount - valuesToMove.intValue());
                valuesToMove.setValue(0);
            } else {
                valuesToMove.subtract(slotCount);
            }
        }
        return sz - rpos;
    }
//endregion
    @Override
    public WritableIntChunk<?> keyChunk() {
        // Allocate a chunk sized to the number of unique values and fill it with all keys in order.
        final WritableIntChunk<?> keyChunk = WritableIntChunk.makeWritableChunk(intSize());
        fillKeyChunk(keyChunk, 0);
        return keyChunk;
    }
    @Override
    public void fillKeyChunk(WritableChunk<?> keyChunk, int offset) {
        // Untyped entry point; delegates to the int-typed implementation below.
        fillKeyChunk(keyChunk.asWritableIntChunk(), offset);
    }
    /**
     * Copy every unique value, in sorted order, into {@code keyChunk} starting at {@code offset}.
     *
     * @param keyChunk destination; must have capacity for at least {@code offset + intSize()} elements
     * @param offset first destination index to write
     */
    private void fillKeyChunk(WritableIntChunk<?> keyChunk, int offset) {
        if(keyChunk.capacity() < offset + intSize()) {
            throw new IllegalArgumentException("Input chunk is not large enough");
        }
        if (leafCount == 1) {
            // single-leaf (directory) representation: one contiguous copy
            keyChunk.copyFromTypedArray(directoryValues, 0, offset, size);
        } else if (leafCount > 0) {
            // multi-leaf representation: copy each leaf's occupied prefix in order
            int destOffset = 0;
            for (int li = 0; li < leafCount; ++li) {
                keyChunk.copyFromTypedArray(leafValues[li], 0, offset + destOffset, leafSizes[li]);
                destOffset += leafSizes[li];
            }
        }
    }
    /**
     * Build a chunk of the per-value occurrence counts, parallel to {@link #keyChunk()}.
     */
    @Override
    public WritableLongChunk<?> countChunk() {
        final WritableLongChunk<Any> countChunk = WritableLongChunk.makeWritableChunk(intSize());
        if (leafCount == 1) {
            countChunk.copyFromTypedArray(directoryCount, 0, 0, size);
        } else if (leafCount > 0) {
            int offset = 0;
            for (int li = 0; li < leafCount; ++li) {
                countChunk.copyFromTypedArray(leafCounts[li], 0, offset, leafSizes[li]);
                offset += leafSizes[li];
            }
        }
        return countChunk;
    }
    // All unique values in sorted order (convenience overload covering the full range).
    private int[] keyArray() {
        return keyArray(0, size-1);
    }
    /**
     * Create an array of the current keys beginning with the first (inclusive) and ending with the last (inclusive)
     * @param first 0-based position of the first key to include (inclusive)
     * @param last 0-based position of the last key to include (inclusive)
     * @return a new array holding the keys at positions {@code [first, last]}, in sorted order
     */
    private int[] keyArray(long first, long last) {
        if(isEmpty()) {
            return ArrayTypeUtils.EMPTY_INT_ARRAY;
        }
        final int totalSize = (int)(last - first + 1);
        final int[] keyArray = new int[totalSize];
        if (leafCount == 1) {
            System.arraycopy(directoryValues, (int)first, keyArray, 0, totalSize);
        } else if (leafCount > 0) {
            int offset = 0;
            int copied = 0;
            int skipped = 0;
            for (int li = 0; li < leafCount && copied < totalSize; ++li) {
                if(skipped < first) {
                    // still positioned before `first`; skip whole leaves until it falls inside one
                    final int toSkip = (int)first - skipped;
                    if(toSkip < leafSizes[li]) {
                        // `first` is inside this leaf: copy its tail
                        final int nToCopy = Math.min(leafSizes[li] - toSkip, totalSize);
                        System.arraycopy(leafValues[li], toSkip, keyArray, 0, nToCopy);
                        copied = nToCopy;
                        offset = copied;
                        skipped = (int)first;
                    } else {
                        skipped += leafSizes[li];
                    }
                } else {
                    // copy whole leaves (or the final partial leaf) until totalSize is reached
                    int nToCopy = Math.min(leafSizes[li], totalSize - copied);
                    System.arraycopy(leafValues[li], 0, keyArray, offset, nToCopy);
                    offset += leafSizes[li];
                    copied += nToCopy;
                }
            }
        }
        return keyArray;
    }
// region Delta Management
    /**
     * Record inserted values into the pending "added" delta (no-op unless delta tracking is on).
     * A value previously recorded as removed is netted out instead of being re-added.
     */
    private void maybeAccumulateAdditions(WritableIntChunk<? extends Values> valuesToInsert) {
        if (!accumulateDeltas || valuesToInsert.size() == 0) {
            return;
        }
        if(prevValues == null) {
            // snapshot the pre-change contents the first time any delta is recorded
            prevValues = new IntVectorDirect(keyArray());
        }
        if (added == null) {
            added = new TIntHashSet(valuesToInsert.size());
        }
        if(removed == null) {
            for (int ii = 0; ii < valuesToInsert.size(); ii++) {
                added.add(valuesToInsert.get(ii));
            }
        } else {
            for (int ii = 0; ii < valuesToInsert.size(); ii++) {
                int val = valuesToInsert.get(ii);
                // Only add to the 'added' set if it was not removed before.
                // if it was then this key is a net-no-change.
                if (!removed.remove(val)) {
                    added.add(val);
                }
            }
        }
    }
    /**
     * Record a removed value into the pending "removed" delta (no-op unless delta tracking is on).
     * A value previously recorded as added is netted out instead.
     */
    private void maybeAccumulateRemoval(int valueRemoved) {
        if(!accumulateDeltas) {
            return;
        }
        if(prevValues == null) {
            // snapshot the pre-change contents the first time any delta is recorded
            prevValues = new IntVectorDirect(keyArray());
        }
        if(removed == null) {
            removed = new TIntHashSet();
        }
        if(added == null || !added.remove(valueRemoved)) {
            removed.add(valueRemoved);
        }
    }
    // Enables or disables accumulation of added/removed deltas for change notification.
    @Override
    public void setTrackDeltas(boolean shouldTrackDeltas) {
        this.accumulateDeltas = shouldTrackDeltas;
    }
    // Drops any pending delta state; afterwards getPrevValues() returns the live multiset again.
    @Override
    public void clearDeltas() {
        added = removed = null;
        prevValues = null;
    }
    // Number of distinct values pending in the "added" delta (0 when none recorded).
    @Override
    public int getAddedSize() {
        return added == null ? 0 : added.size();
    }
    // Number of distinct values pending in the "removed" delta (0 when none recorded).
    @Override
    public int getRemovedSize() {
        return removed == null ? 0 : removed.size();
    }
    // Copies the pending removed values into `chunk` starting at `position`.
    // NOTE(review): assumes at least one removal was recorded (`removed` non-null) — callers
    // should consult getRemovedSize() first.
    public void fillRemovedChunk(WritableIntChunk<? extends Values> chunk, int position) {
        chunk.copyFromTypedArray(removed.toArray(), 0, position, removed.size());
    }
    // Copies the pending added values into `chunk` starting at `position`.
    // NOTE(review): assumes at least one addition was recorded (`added` non-null) — callers
    // should consult getAddedSize() first.
    public void fillAddedChunk(WritableIntChunk<? extends Values> chunk, int position) {
        chunk.copyFromTypedArray(added.toArray(), 0, position, added.size());
    }
    // Pre-change snapshot if a delta is pending; otherwise this multiset itself (no changes yet).
    public IntVector getPrevValues() {
        return prevValues == null ? this : prevValues;
    }
// endregion
// region IntVector
@Override
public int get(long i) {
if(i < 0 || i > size()) {
throw new IllegalArgumentException("Illegal index " + i + " current size: " + size());
}
if(leafCount == 1) {
return directoryValues[(int)i];
} else {
for(int ii = 0; ii < leafCount; ii++) {
if(i < leafSizes[ii]) {
return leafValues[ii][(int)(i)];
}
i -= leafSizes[ii];
}
}
throw new IllegalStateException("Index " + i + " not found in this SSM");
}
    // NOTE(review): keyArray treats its second argument as inclusive, so this returns the elements
    // at positions [fromIndex, toIndex] — confirm this matches the IntVector.subVector contract
    // (commonly documented with an exclusive upper bound).
    @Override
    public IntVector subVector(long fromIndex, long toIndex) {
        return new IntVectorDirect(keyArray(fromIndex, toIndex));
    }
    /**
     * Gather the keys at the given positions into a new vector (positions need not be sorted).
     */
    @Override
    public IntVector subVectorByPositions(long[] positions) {
        final int[] keyArray = new int[positions.length];
        int writePos = 0;
        for (long position : positions) {
            keyArray[writePos++] = get(position);
        }
        return new IntVectorDirect(keyArray);
    }
    // Copy of all unique values in sorted order.
    @Override
    public int[] toArray() {
        return keyArray();
    }
    // Number of unique values currently held (the vector length); distinct from totalSize,
    // which also accounts for multiplicities.
    @Override
    public long size() {
        return size;
    }
    // Snapshot copy of the current keys, detached from this multiset.
    @Override
    public IntVector getDirect() {
        return new IntVectorDirect(keyArray());
    }
//endregion
//region VectorEquals
    /**
     * Element-wise equality against any IntVector: same length and same values in order.
     */
    private boolean equalsArray(IntVector o) {
        if(size() != o.size()) {
            return false;
        }
        if(leafCount == 1) {
            for(int ii = 0; ii < size; ii++) {
                //region DirObjectEquals
                if(directoryValues[ii] != o.get(ii)) {
                    return false;
                }
                //endregion DirObjectEquals
            }
            return true;
        }
        // multi-leaf: compare our leaf contents against the other vector's flat positions
        int nCompared = 0;
        for (int li = 0; li < leafCount; ++li) {
            for(int ai = 0; ai < leafSizes[li]; ai++) {
                if(leafValues[li][ai] != o.get(nCompared++)) {
                    return false;
                }
            }
        }
        return true;
    }
//endregion VectorEquals
    /**
     * Element-wise equality against a boxed vector whose component type is int/Integer.
     */
    private boolean equalsArray(ObjectVector<?> o) {
        //region EqualsArrayTypeCheck
        if(o.getComponentType() != int.class && o.getComponentType() != Integer.class) {
            return false;
        }
        //endregion EqualsArrayTypeCheck
        if(size() != o.size()) {
            return false;
        }
        if(leafCount == 1) {
            for(int ii = 0; ii < size; ii++) {
                final Integer val = (Integer)o.get(ii);
                //region VectorEquals
                // a stored NULL_INT sentinel only matches a boxed null or NULL_INT on the other side
                if(directoryValues[ii] == NULL_INT && val != null && val != NULL_INT) {
                    return false;
                }
                //endregion VectorEquals
                if(!Objects.equals(directoryValues[ii], val)) {
                    return false;
                }
            }
            return true;
        }
        int nCompared = 0;
        for (int li = 0; li < leafCount; ++li) {
            for(int ai = 0; ai < leafSizes[li]; ai++) {
                final Integer val = (Integer)o.get(nCompared++);
                //region VectorEquals
                if(leafValues[li][ai] == NULL_INT && val != null && val != NULL_INT) {
                    return false;
                }
                //endregion VectorEquals
                if(!Objects.equals(leafValues[li][ai], val)) {
                    return false;
                }
            }
        }
        return true;
    }
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof IntSegmentedSortedMultiset)) {
//region VectorEquals
if(o instanceof IntVector) {
return equalsArray((IntVector)o);
}
//endregion VectorEquals
if(o instanceof ObjectVector) {
return equalsArray((ObjectVector)o);
}
return false;
}
final IntSegmentedSortedMultiset that = (IntSegmentedSortedMultiset) o;
if(size() != that.size()) {
return false;
}
if(leafCount == 1) {
if(that.leafCount != 1 || size != that.size) {
return false;
}
for(int ii = 0; ii < size; ii++) {
//region DirObjectEquals
if(directoryValues[ii] != that.directoryValues[ii]) {
return false;
}
//endregion DirObjectEquals
}
return true;
}
int otherLeaf = 0;
int otherLeafIdx = 0;
for (int li = 0; li < leafCount; ++li) {
for(int ai = 0; ai < leafSizes[li]; ai++) {
//region LeafObjectEquals
if(leafValues[li][ai] != that.leafValues[otherLeaf][otherLeafIdx++]) {
return false;
}
//endregion LeafObjectEquals
if(otherLeafIdx >= that.leafSizes[otherLeaf]) {
otherLeaf++;
otherLeafIdx = 0;
}
if(otherLeaf >= that.leafCount) {
return false;
}
}
}
return true;
}
    /**
     * Hash of the unique values, consistent with {@link #equals} (counts are not mixed in).
     * NOTE(review): the multi-leaf form also seeds with leafCount — consistent with equals only if
     * equal contents always imply the same leaf structure; confirm that invariant holds.
     */
    @Override
    public int hashCode() {
        if(leafCount == 1) {
            int result = Objects.hash(size);
            for(int ii = 0; ii < size; ii++) {
                result = result * 31 + Objects.hash(directoryValues[ii]);
            }
            return result;
        }
        int result = Objects.hash(leafCount, size);
        for (int li = 0; li < leafCount; ++li) {
            for(int ai = 0; ai < leafSizes[li]; ai++) {
                result = result * 31 + Objects.hash(leafValues[li][ai]);
            }
        }
        return result;
    }
    /**
     * Renders the unique values as "[a, b, c]"; counts are not shown.
     */
    @Override
    public String toString() {
        if (leafCount == 1) {
            return ArrayTypeUtils.toString(directoryValues, 0, intSize());
        } else if (leafCount > 0) {
            StringBuilder arrAsString = new StringBuilder("[");
            for (int li = 0; li < leafCount; ++li) {
                for(int ai = 0; ai < leafSizes[li]; ai++) {
                    arrAsString.append(leafValues[li][ai]).append(", ");
                }
            }
            // NOTE(review): assumes at least one element was appended; a multi-leaf state with all
            // leaves empty would make this replace() fail — presumed unreachable.
            arrAsString.replace(arrAsString.length() - 2, arrAsString.length(), "]");
            return arrAsString.toString();
        }
        return "[]";
    }
// region Extensions
// endregion Extensions
}
| 40.244922
| 245
| 0.553075
|
ecaf6b2b061342f9410895188f96580b6e163261
| 281
|
// algorithm demonstrating the remainder operator (%)
//
package aulaJava.principal;
/**
 * Small demo of the remainder operator: prints how many cars are left over
 * after pairing 9 cars with 2 people each.
 */
public class ClasseMod {
    public static void main(String[] args) {
        final double carros = 9;
        final double pessoas = 2;
        final double sobra = carros % pessoas;
        System.out.println("sobraram exatamente: " + sobra + " carros.");
    }
}
| 15.611111
| 64
| 0.672598
|
a9640802c6de29ed92ffdebf1cffbcb2972e32c5
| 1,085
|
package com.jaoafa.jaosuperachievement2.achievements;
import com.jaoafa.jaosuperachievement2.api.Achievementjao;
import com.jaoafa.jaosuperachievement2.lib.Achievement;
import com.jaoafa.jaosuperachievement2.lib.AchievementInterface;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.Listener;
import org.bukkit.event.player.PlayerCommandPreprocessEvent;
/**
 * Grants the CHANGETIME achievement when a (non-NPC) player who has the vanilla
 * time permission issues a "/time set ..." command.
 */
public class ChangeTime implements AchievementInterface, Listener {
    @Override
    public Achievement getAchievement() {
        return Achievement.CHANGETIME;
    }

    @EventHandler(ignoreCancelled = true)
    public void onCommand(PlayerCommandPreprocessEvent event) {
        final Player player = event.getPlayer();
        // Citizens-style NPCs carry the "NPC" metadata tag; ignore their commands.
        if (player.hasMetadata("NPC")) {
            return;
        }
        final boolean mayChangeTime = player.hasPermission("minecraft.command.time");
        final boolean isTimeSet = event.getMessage().startsWith("/time set ");
        if (mayChangeTime && isTimeSet) {
            Achievementjao.getAchievementAsync(player, getAchievement());
        }
    }
}
| 32.878788
| 69
| 0.709677
|
ca24ad048e2809db6bbecd2e95193a97fe85abf5
| 1,059
|
package no.nav.arbeid.pam.kodeverk.ansettelse;
import static no.nav.arbeid.pam.kodeverk.ansettelse.Sprak.BOKMAL;
import static no.nav.arbeid.pam.kodeverk.ansettelse.Sprak.NYNORSK;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
/**
 * Work-day categories with Bokmål and Nynorsk display texts.
 */
public enum Arbeidsdager implements Kode {

    LOERDAG("Lørdag", "Lørdag"),
    SOENDAG("Søndag", "Søndag"),
    UKEDAGER("Ukedager", "Ukedager");

    // immutable language-code -> display-text map
    private final Map<String, String> sprakTekster;

    /**
     * @param bokmal  display text in Bokmål
     * @param nynorsk display text in Nynorsk
     */
    private Arbeidsdager(String bokmal, String nynorsk) {
        // Build the map in the constructor instead of the previous double-brace initialization,
        // which created an anonymous HashMap subclass per constant (the cause of the suppressed
        // "serial" warning, now removed along with the annotation).
        final Map<String, String> tekster = new HashMap<>();
        tekster.put(BOKMAL, bokmal);
        tekster.put(NYNORSK, nynorsk);
        this.sprakTekster = Collections.unmodifiableMap(tekster);
    }

    @Override
    public Map<String, String> tekster() {
        return sprakTekster;
    }
}
| 23.021739
| 70
| 0.612842
|
0a60e7c644ec52a44e545c7d5f248f3eeb4fb6cc
| 9,357
|
package opener15;
import tasks.AbstractTask;
import tasks.Tester;
import utils.FileUtils;
import utils.log.Logger;
import java.io.IOException;
import java.math.BigInteger;
import java.util.*;
import static utils.OtherUtils.padLeft;
//Answer : -136501880806045292
/**
 * Solver for a 4-dimensional cellular-automaton task.
 *
 * <p>The input snapshot file contains groups of live 4-D cells, one "(a,b,c,d)" tuple per line,
 * with blank lines separating groups. Each group is simulated independently under a life-like
 * rule until its translation-normalized shape repeats; that yields the group's period and the
 * translation it undergoes per period, which is then used to extrapolate the group to the huge
 * target step count arithmetically instead of step-by-step. The answer is the sum over all final
 * cells of the product of their four coordinates.
 *
 * <p>NOTE(review): helpers {@code pi(..)}, {@code bi(..)}, {@code min(..)} and {@code progress(..)}
 * are not defined in this file — presumably inherited from {@code AbstractTask} or statically
 * imported elsewhere; confirm before refactoring.
 */
public class Task_38 extends AbstractTask {

    public static void main(String[] args) {
        Logger.init("default.log");
        Tester.test(new Task_38());
        Logger.close();
    }

    // Target step count and its decomposition: baseStep is the step of the precomputed ".points"
    // snapshot; st12 is the number of whole 12-step periods remaining after baseStep.
    int steps = 2 * 128 * 128 * 128 + 128 + 5 - 1;
    int baseStep = steps - (steps - 400) / 12 * 12;
    int st12 = (steps - baseStep) / 12;

    // Per-group simulation bookkeeping.
    Map<Cell, Cell> all = new HashMap<>();              // every cell that is alive or has live neighbours
    Set<Cell> changed = new HashSet<>();                // cells whose state flipped in the last step
    Set<Cell> affected = new HashSet<>();               // cells whose neighbour count changed this step
    Map<CellGroup, CellGroup> states = new HashMap<>(); // normalized snapshots seen so far
    int current = 0;                                    // NOTE(review): written but never read — appears vestigial
    List<CellGroup> groups = new ArrayList<>();         // one resolved (cycled) group per input section

    public void solving() throws IOException {
        // Parse the snapshot; a blank line terminates the current group and triggers its simulation.
        for (String cell : FileUtils.readLines("/downloads/task38/" + padLeft("" + baseStep, 4, '0') + ".points")) {
            if (cell.isEmpty()) {
                if (all.size() == 0) {
                    System.out.println("Shouldn't happen");
                }
                groups.add(runGroup());
                progress(groups.size());
                continue;
            }
            cell = cell.replaceAll(" +", "");
            assert !cell.isEmpty();
            // strip the surrounding parentheses and split the four coordinates
            String[] sp = cell.substring(1, cell.length() - 1).split(",");
            Cell c = new Cell(pi(sp[0]), pi(sp[1]), pi(sp[2]), pi(sp[3]));
            c.state = true;
            if (!changed.contains(c)) {
                changed.add(c);
                affected.add(c);
                all.put(c, c);
            }
        }
        // NOTE(review): holds only if the input file ends with a blank line, so the last group ran.
        assert all.size() == 0;
        // Extrapolate each group forward by the remaining whole periods and sum the cell values.
        BigInteger res = BigInteger.ZERO;
        for (CellGroup group : groups) {
            for (Cell c : group.translate((12 / group.cycle) * st12)) {
                res = res.add(c.value());
            }
        }
        System.out.println(res);
    }

    /**
     * Simulate the currently-loaded group until its normalized shape repeats; returns the
     * {@link CellGroup} annotated with its cycle and per-cycle drift, and resets all per-group
     * bookkeeping for the next group.
     */
    private CellGroup runGroup() throws IOException {
//        System.out.print(all.size() + ": ");
        int steps = -1;
        CellGroup res;
        while (true) {
            ++steps;
            // collect live cells and normalize them into a translation-independent snapshot
            List<Cell> stateCells = new ArrayList<>();
            for (Cell c : all.keySet()) {
                if (c.state) {
                    stateCells.add(c);
                }
            }
            CellGroup cg = CellGroup.normalize(stateCells);
            CellGroup ex = states.get(cg);
            if (ex != null) {
                // shape seen before: record the drift accumulated since the first occurrence
                ex.setd(cg.ma - ex.ma, cg.mb - ex.mb, cg.mc - ex.mc, cg.md - ex.md);
                // NOTE(review): stores the absolute step of recurrence, which equals the period
                // only if the shape was first seen at step 0 — presumably guaranteed by the input.
                ex.cycle = steps;
//                System.out.println(steps + " steps");
//                System.out.println(new Cell(ex.ma, ex.mb, ex.mc, ex.md));
//                System.out.println(new Cell(cg.ma, cg.mb, cg.mc, cg.md));
//                System.out.println(new Cell(ex.da, ex.db, ex.dc, ex.dd));
//                System.out.println();
                res = ex;
                break;
            }
            states.put(cg, cg);
            // propagate the previous step's flips into all 80 neighbours' counts
            for (Cell cell : changed) {
                boolean state = cell.state;
                int dnb = state ? 1 : -1;
                for (int a = -1; a < 2; ++a) {
                    for (int b = -1; b < 2; ++b) {
                        for (int c = -1; c < 2; ++c) {
                            for (int d = -1; d < 2; ++d) {
                                if (a == 0 && b == 0 && c == 0 && d == 0) {
                                    continue;
                                }
                                Cell key = new Cell(cell.a + a, cell.b + b, cell.c + c, cell.d + d);
                                Cell nc = all.get(key);
                                if (nc == null) {
                                    // neighbour not tracked yet: only materialize it for births
                                    if (state) {
                                        nc = key;
                                        all.put(nc, nc);
                                        nc.nbCount = 1;
                                        affected.add(nc);
                                    }
                                } else {
                                    nc.nbCount += dnb;
                                    affected.add(nc);
                                }
                            }
                        }
                    }
                }
                current += dnb;
            }
            changed.clear();
            // apply the life rule to every cell whose neighbour count changed
            for (Cell cell : affected) {
                if (cell.state) {
                    // live cell survives only with 9..11 live neighbours
                    if (cell.nbCount < 9 || cell.nbCount > 11) {
                        cell.invert();
                        changed.add(cell);
                        if (cell.nbCount == 0) {
                            //became empty with no neighbours
                            all.remove(cell);
                        }
                    }
                } else {
                    // empty cell
                    // an empty cell is born with exactly 10 live neighbours
                    if (cell.nbCount == 10) {
                        cell.invert();
                        changed.add(cell);
                    } else {
                        if (cell.nbCount == 0) {
                            //empty with no neighbours
                            all.remove(cell);
                        }
                    }
                }
            }
            affected.clear();
        }
        // reset all per-group state for the next group
        current = 0;
        all.clear();
        states.clear();
        changed.clear();
        affected.clear();
        return res;
    }

    /**
     * A translation-normalized set of live cells plus metadata: the minimum corner it was
     * normalized from (ma..md), its per-cycle drift (da..dd) and the cycle, filled in once the
     * simulation re-encounters the same normalized shape.
     */
    public static class CellGroup {
        final Set<Cell> cells;
        final int hashCode;

        final int m[] = new int[4];  // minimum corner; mirrors ma..md
        final int d[] = new int[4];  // drift per cycle; mirrors da..dd
        final int ma;
        final int mb;
        final int mc;
        final int md;
        int da;
        int db;
        int dc;
        int dd;
        int cycle;

        public CellGroup(Set<Cell> newCells, int ma, int mb, int mc, int md) {
            cells = newCells;
            m[0] = ma;
            m[1] = mb;
            m[2] = mc;
            m[3] = md;
            this.ma = ma;
            this.mb = mb;
            this.mc = mc;
            this.md = md;
            // cache the hash; the cell set is effectively frozen after construction
            hashCode = cells.hashCode();
        }

        public void setd(int da, int db, int dc, int dd) {
            d[0] = da;
            d[1] = db;
            d[2] = dc;
            d[3] = dd;
            this.da = da;
            this.db = db;
            this.dc = dc;
            this.dd = dd;
        }

        public int size() {
            return cells.size();
        }

        // Reconstruct the group's absolute cells after the given number of whole cycles.
        public Set<Cell> translate(int cycles) {
            Set<Cell> newCells = new HashSet<>();
            for (Cell c : cells) {
                newCells.add(
                        c.translate(ma + da * cycles, mb + db * cycles, mc + dc * cycles, md + dd * cycles)
                );
            }
            return newCells;
        }

        // Shift the cells so their minimum corner is the origin; remembers the corner removed.
        public static CellGroup normalize(List<Cell> cells) {
            int mina, minb, minc, mind;
            mina = minb = minc = mind = 100000; // sentinel above any coordinate — TODO confirm bound
            Set<Cell> newCells = new HashSet<>();
            for (Cell c : cells) {
                mina = min(mina, c.a);
                minb = min(minb, c.b);
                minc = min(minc, c.c);
                mind = min(mind, c.d);
            }
            for (Cell c : cells) {
                newCells.add(new Cell(c.a - mina, c.b - minb, c.c - minc, c.d - mind));
            }
            return new CellGroup(newCells, mina, minb, minc, mind);
        }

        @Override
        public int hashCode() {
            return hashCode;
        }

        @Override
        public boolean equals(Object o) {
            // NOTE(review): unchecked cast — assumes comparison only against other CellGroups
            CellGroup cellGroup = (CellGroup) o;
            return cells.equals(cellGroup.cells);
        }
    }

    /**
     * A 4-D lattice cell. Identity (equals/hashCode/compareTo) is the coordinate tuple; the
     * mutable {@code state}/{@code nbCount} fields carry simulation state.
     */
    public static class Cell implements Comparable<Cell> {
        public final int a;
        public final int b;
        public final int c;
        public final int d;
        private final int hashCode;
        boolean state;   // alive?
        int nbCount;     // number of live neighbours

        public Cell(int a, int b, int c, int d) {
            this.a = a;
            this.b = b;
            this.c = c;
            this.d = d;
            this.state = false;
            // NOTE(review): hash is the plain coordinate sum — legal (equal cells hash equal)
            // but collision-heavy; also uses the deprecated Integer constructor.
            hashCode = new Integer(a + b + c + d).hashCode();
        }

        public void invert() {
            state = !state;
        }

        // The cell's contribution to the answer: the product of its four coordinates.
        public BigInteger value() {
            return bi(a).multiply(bi(b)).multiply(bi(c)).multiply(bi(d));
        }

        public Cell translate(int da, int db, int dc, int dd) {
            return new Cell(a + da, b + db, c + dc, d + dd);
        }

        public int hashCode() {
            return hashCode;
        }

        public boolean equals(Object obj) {
            // NOTE(review): unchecked cast — assumes comparison only against other Cells
            Cell p = (Cell) obj;
            return a == p.a && b == p.b && c == p.c && d == p.d;
        }

        // Lexicographic order on (a, b, c, d).
        public int compareTo(Cell q) {
            int ret = (a < q.a) ? -1 : (a == q.a ? 0 : 1);
            if (ret != 0) return ret;
            ret = (b < q.b) ? -1 : (b == q.b ? 0 : 1);
            if (ret != 0) return ret;
            ret = (c < q.c) ? -1 : (c == q.c ? 0 : 1);
            if (ret != 0) return ret;
            return (d < q.d) ? -1 : (d == q.d ? 0 : 1);
        }

        @Override
        public String toString() {
            return "(" + a + "," + b + "," + c + "," + d + ')';
        }
    }
}
| 29.799363
| 116
| 0.395533
|
83e7e06d205d3d5d54668025bfcf174e3c8c6f5b
| 380
|
package com.homework.domain.coordenador;
import java.io.Serializable;
import javax.persistence.Entity;
import com.homework.domain.usuario.Usuario;
import lombok.EqualsAndHashCode;
/**
 * JPA entity for a "coordenador" (coordinator) user; all persistent state is inherited from
 * {@link Usuario}. Lombok generates equals/hashCode from explicitly-included fields only and
 * delegates to the superclass as well (callSuper = true).
 */
@Entity
@EqualsAndHashCode(onlyExplicitlyIncluded = true, callSuper = true)
public class Coordenador extends Usuario implements Serializable{

	// serialization version for the Serializable contract
	private static final long serialVersionUID = 1L;
}
| 22.352941
| 67
| 0.823684
|
782acf019258d709cdef93439699b4bfc2df2e2b
| 1,241
|
package org.ostis.scmemory.model.pattern;
import org.ostis.scmemory.model.pattern.element.ScPatternElement;
/**
* This class represents the basic element of a universal searching pattern.
* Each ScPattern consists of a number of scc-triplets with searching rules.
* Searching rule is a variant of {@link ScPatternElement} object that can be:
* <ul>
* <li>
* Fixed. If one of ScPatternElement is {@link org.ostis.scmemory.model.pattern.element.ScFixedElement},
* the element address will be used at searching. This address will be present and constant in every founded pattern.
* </li>
* <li>
* Typed. If one of ScPatternElement is {@link org.ostis.scmemory.model.pattern.element.ScTypedElement},
* the element type will be used at searching. Address of this element can be different in different founded patterns.
* </li>
* <li>
* Aliased. Alise is used when you want to use Typed parameter as fixed. Api - {@link org.ostis.scmemory.model.pattern.element.ScAliasedElement}
* </li>
* </ul>
*
* @author artrayme
* @since 0.6.0
*/
public interface ScPatternTriplet {
    /** @return the first element of the scc-triplet */
    ScPatternElement get1();

    /** @return the second element of the scc-triplet */
    ScPatternElement get2();

    /** @return the third element of the scc-triplet */
    ScPatternElement get3();
}
| 37.606061
| 152
| 0.702659
|
9f42fdfd27a5fc62db954ba243d66bfaa30ca088
| 498
|
package com.atguigu.gmall.search.pojo;
import lombok.Data;
import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.FieldType;
/**
* @author 凯少
* @create 2019-12-09 20:03
*/
/**
 * A flattened product attribute (id / name / value) as indexed in Elasticsearch.
 */
@Data
public class SearchAttr {

    // attribute id, indexed as a numeric ES field
    @Field(type = FieldType.Long)
    private Long attrId;

    // attribute name; Keyword -> matched exactly, not analyzed
    @Field(type = FieldType.Keyword)
    private String attrName;

    // attribute value; Keyword -> matched exactly, not analyzed
    @Field(type = FieldType.Keyword)
    private String attrValue;

//    private Long spuId;
}
| 22.636364
| 68
| 0.732932
|
ee026652abd8176b927ef17fd8dbc1c33e5679d5
| 6,001
|
/*
* Copyright 2019-2021 Michael Hoffer <info@michaelhoffer.de>. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* If you use this software for scientific research then please cite the following publication(s):
*
* M. Hoffer, C. Poliwoda, & G. Wittum. (2013). Visual reflection library:
* a framework for declarative GUI programming on the Java platform.
* Computing and Visualization in Science, 2013, 16(4),
* 181–192. http://doi.org/10.1007/s00791-014-0230-y
*/
package eu.mihosoft.binarytypeutils;
import eu.mihosoft.streamutils.StreamUtils;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteOrder;
import java.util.HashMap;
import java.util.Map;
/**
* Serializes and deserializes Java number objects to and from specified binary representation
* (binary-compatible with C/C++ on many devices, can be used for communicating with MCUs and Boards such as Arduinos etc.).
*/
class BinarySerializerImpl implements BinarySerializer, BinaryDeserializer {
private final StreamUtils.ReadingInputStream inputStream;
private final OutputStream outputStream;
private final ByteOrder byteOrder;
private final Map<BinaryType, BinaryConverter> converterByBinaryTypeName = new HashMap<>();
/**
* Creates a new serializer instance.
* @param inputStream input stream to be used by this communication object
* @param outputStream output stream to be used by this communication object
* @param byteOrder byte order to be used
*/
private BinarySerializerImpl(InputStream inputStream, OutputStream outputStream, ByteOrder byteOrder) {
org.tinylog.Logger.debug("creating BinaryCommunication with byte-order:='{}'.",
byteOrder);
this.outputStream = outputStream;
this.inputStream = new StreamUtils.ReadingInputStream(inputStream);
this.byteOrder = byteOrder;
init();
}
/**
* Creates a new serialization instance.
*
* @param inputStream input stream to use for reading data
* @param outputStream output stream to use for writing data
* @param byteOrder byte order for value conversion
* @return new instance
*/
/*pkg private*/ static BinarySerializerImpl newInstance(InputStream inputStream, OutputStream outputStream, ByteOrder byteOrder) {
return new BinarySerializerImpl(inputStream, outputStream, byteOrder);
}
@Override
public ByteOrder getByteOrder() {
return byteOrder;
}
/**
* Initializes this communication object (creates converter instances).
*/
private void init() {
converterByBinaryTypeName.put(BinaryType.BYTE, new BinaryConverter.SByteConverter());
converterByBinaryTypeName.put(BinaryType.INT8, new BinaryConverter.SByteConverter());
converterByBinaryTypeName.put(BinaryType.UBYTE, new BinaryConverter.UByteConverter());
converterByBinaryTypeName.put(BinaryType.UINT8, new BinaryConverter.UByteConverter());
converterByBinaryTypeName.put(BinaryType.INT64, new BinaryConverter.SINT8Converter());
converterByBinaryTypeName.put(BinaryType.INT32, new BinaryConverter.SINT4Converter());
converterByBinaryTypeName.put(BinaryType.UINT32, new BinaryConverter.UINT4Converter());
converterByBinaryTypeName.put(BinaryType.INT16, new BinaryConverter.SINT2Converter());
converterByBinaryTypeName.put(BinaryType.UINT16, new BinaryConverter.UINT2Converter());
converterByBinaryTypeName.put(BinaryType.FLOAT32, new BinaryConverter.SFloat4Converter());
converterByBinaryTypeName.put(BinaryType.FLOAT64, new BinaryConverter.SFloat8Converter());
}
@Override
public byte[] getReadBytes() {
return this.inputStream.getReadBytes();
}
@Override
public void clearReadBuffer() {
this.inputStream.clearBuffer();
}
@Override
public InputStream getInputStream() {
return inputStream;
}
@Override
public OutputStream getOutputStream() {
return outputStream;
}
@Override
public Object readValue(BinaryType binaryType) {
org.tinylog.Logger.debug("receiving value binaryType='{}'.", binaryType);
BinaryConverter converter = converterByBinaryTypeName.get(binaryType);
if(converter==null) {
RuntimeException ex = new RuntimeException("No converter found for binary type: " + binaryType + ".");
throw ex;
}
try {
Object o = converter.read(inputStream, byteOrder).getObject();
return o;
} catch (IOException e) {
RuntimeException ex = new RuntimeException("Cannot receive value of type " + binaryType+".", e);
throw ex;
}
}
@Override
public void writeValue(BinaryType binaryType, Object value) {
org.tinylog.Logger.debug("sending value binaryType='{}'.", binaryType);
BinaryConverter converter = converterByBinaryTypeName.get(binaryType);
if(converter==null) {
RuntimeException ex = new RuntimeException("No converter found for binary type: " + binaryType + ".");
throw ex;
}
try {
converter.write(value, outputStream, byteOrder);
} catch (IOException e) {
RuntimeException ex = new RuntimeException("Cannot write value of type " + binaryType+".", e);
throw ex;
}
}
}
| 37.50625
| 134
| 0.701883
|
10c79c38da22b4e194757336e079982f835e9e85
| 7,075
|
package io.cucumber.compatibility;
import io.cucumber.messages.internal.com.fasterxml.jackson.databind.JsonNode;
import io.cucumber.messages.internal.com.fasterxml.jackson.databind.node.ArrayNode;
import io.cucumber.messages.internal.com.fasterxml.jackson.databind.node.BooleanNode;
import io.cucumber.messages.internal.com.fasterxml.jackson.databind.node.NumericNode;
import io.cucumber.messages.internal.com.fasterxml.jackson.databind.node.ObjectNode;
import io.cucumber.messages.internal.com.fasterxml.jackson.databind.node.TextNode;
import org.hamcrest.CoreMatchers;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeDiagnosingMatcher;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Spliterator;
import java.util.stream.Collectors;
import static java.util.Spliterators.spliteratorUnknownSize;
import static java.util.stream.StreamSupport.stream;
import static org.hamcrest.CoreMatchers.anyOf;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.isA;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.collection.IsIterableContainingInOrder.contains;
import static org.hamcrest.collection.IsIterableContainingInRelativeOrder.containsInRelativeOrder;
import static org.hamcrest.collection.IsMapContaining.hasEntry;
import static org.hamcrest.collection.IsMapContaining.hasKey;
/**
 * Hamcrest matcher that compares an actual Cucumber message (as a JSON node) against an expected
 * message from the compatibility kit, relaxing fields that legitimately vary across platforms and
 * runs (ids, timestamps, uris, meta info). For relaxed fields only the presence and/or node type
 * is checked; every other field is compared structurally and recursively.
 */
public class AComparableMessage extends
        TypeSafeDiagnosingMatcher<JsonNode> {

    private final List<Matcher<?>> expectedFields;
    private final int depth; // nesting level, used only for indentation in describeTo()

    public AComparableMessage(JsonNode expectedMessage) {
        this(expectedMessage, 0);
    }

    AComparableMessage(JsonNode expectedMessage, int depth) {
        this.depth = depth + 1;
        this.expectedFields = extractExpectedFields(expectedMessage, this.depth);
    }

    /**
     * Builds one matcher per expected field, substituting relaxed matchers for fields whose
     * exact values are not reproducible.
     */
    private static List<Matcher<?>> extractExpectedFields(JsonNode expectedMessage, int depth) {
        List<Matcher<?>> expected = new ArrayList<>();
        asMapOfJsonNameToField(expectedMessage).forEach((fieldName, expectedValue) -> {
            switch (fieldName) {
                // exception: error messages are platform specific
                case "message":
                    expected.add(hasEntry(is(fieldName), isA(expectedValue.getClass())));
                    break;

                // exception: the CCK uses relative paths as uris
                case "uri":
                    expected.add(hasEntry(is(fieldName), isA(expectedValue.getClass())));
                    break;

                // exception: the CCK expects source references with URIs but
                // Java can only provide method and stack trace references.
                case "sourceReference":
                    expected.add(hasKey(is(fieldName)));
                    break;

                // exception: ids are not predictable
                case "id":
                case "pickleId":
                case "astNodeId":
                case "hookId":
                case "pickleStepId":
                case "testCaseId":
                case "testStepId":
                case "testCaseStartedId":
                    expected.add(hasEntry(is(fieldName), isA(TextNode.class)));
                    break;

                // exception: protocolVersion can vary
                case "protocolVersion":
                    expected.add(hasEntry(is(fieldName), isA(TextNode.class)));
                    break;

                case "astNodeIds":
                case "stepDefinitionIds":
                    expected.add(hasEntry(is(fieldName),
                        containsInRelativeOrder(isA(TextNode.class))));
                    break;

                // exception: timestamps and durations are not predictable
                case "timestamp":
                case "duration":
                    expected.add(hasEntry(is(fieldName), isA(expectedValue.getClass())));
                    break;

                // exception: Meta fields depend on the platform
                case "implementation":
                case "runtime":
                case "os":
                case "cpu":
                    expected.add(hasEntry(is(fieldName), isA(expectedValue.getClass())));
                    break;

                case "ci":
                    // exception: Absent when running locally, present in ci
                    expected.add(
                        anyOf(not(hasKey(is(fieldName))), hasEntry(is(fieldName),
                            isA(expectedValue.getClass()))));
                    break;

                default:
                    // regular field: compare the value structurally (recursing into objects/arrays)
                    expected.add(hasEntry(is(fieldName), aComparableValue(expectedValue,
                        depth)));
            }
        });
        return expected;
    }

    /**
     * Wraps a single expected JSON value in the appropriate matcher: recursive for objects,
     * element-wise for arrays, exact equality for scalars.
     */
    @SuppressWarnings("unchecked")
    private static Matcher<?> aComparableValue(Object value, int depth) {
        if (value instanceof ObjectNode) {
            JsonNode message = (JsonNode) value;
            return new AComparableMessage(message, depth);
        }

        if (value instanceof ArrayNode) {
            ArrayNode values = (ArrayNode) value;
            Spliterator<JsonNode> spliterator = spliteratorUnknownSize(values.iterator(), 0);
            List<Matcher<? super Object>> allComparableValues = stream(spliterator, false)
                    .map(o -> aComparableValue(o, depth))
                    .map(o -> (Matcher<? super Object>) o)
                    .collect(Collectors.toList());
            return contains(allComparableValues);
        }

        if (value instanceof TextNode
                || value instanceof NumericNode
                || value instanceof BooleanNode) {
            return CoreMatchers.is(value);
        }

        throw new IllegalArgumentException("Unsupported type " + value.getClass() +
                ": " + value);
    }

    @Override
    public void describeTo(Description description) {
        // indent the field matchers one tab per nesting level for readable failure output
        StringBuilder padding = new StringBuilder();
        for (int i = 0; i < depth + 1; i++) {
            padding.append("\t");
        }
        description.appendList("\n" + padding, ",\n" + padding,
            "\n", expectedFields);
    }

    @Override
    protected boolean matchesSafely(JsonNode actual, Description mismatchDescription) {
        // every per-field matcher must accept the actual field map; first failure wins
        Map<String, Object> actualFields = asMapOfJsonNameToField(actual);
        for (Matcher<?> expectedField : expectedFields) {
            if (!expectedField.matches(actualFields)) {
                expectedField.describeMismatch(actualFields, mismatchDescription);
                return false;
            }
        }
        return true;
    }

    // Flattens a JSON object node into an ordered field-name -> node map.
    private static Map<String, Object> asMapOfJsonNameToField(JsonNode envelope) {
        Map<String, Object> map = new LinkedHashMap<>();
        envelope.fieldNames()
                .forEachRemaining(jsonField -> {
                    JsonNode value = envelope.get(jsonField);
                    map.put(jsonField, value);
                });
        return map;
    }
}
| 40.66092
| 98
| 0.606926
|
18b63aa77f9c91f2db57710a94bd04d34d81cc73
| 403
|
package org.ovirt.engine.core.utils.serialization.json;
import java.util.Collection;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.databind.annotation.JsonTypeResolver;
/**
 * Jackson mix-in applied to {@link Collection} types. The annotations embed
 * the concrete runtime class into the serialized JSON
 * ({@link JsonTypeInfo.Id#CLASS}) and delegate type-id resolution to
 * {@code JsonCustomTypeResolverBuilder} — presumably so collections round-trip
 * back to their original implementation type; confirm against the serializer
 * configuration that registers this mix-in.
 * <p>
 * The raw {@code Collection} is intentional: mix-ins only contribute
 * annotations and are never instantiated.
 */
@SuppressWarnings("serial")
@JsonTypeInfo(use = JsonTypeInfo.Id.CLASS)
@JsonTypeResolver(JsonCustomTypeResolverBuilder.class)
public abstract class JsonCollectionMixIn implements Collection {
}
| 31
| 66
| 0.846154
|
037229f097296c9eec1c8126edfe967c9f831d49
| 2,858
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.rest;
import org.apache.http.entity.StringEntity;
import org.elasticsearch.client.Response;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.hamcrest.Matcher;
import java.io.IOException;
import java.util.Map;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
public class Netty4HeadBodyIsEmptyIT extends ESRestTestCase {

    /** HEAD on the root endpoint must advertise a positive Content-Length without a body. */
    public void testHeadRoot() throws IOException {
        assertHeadRequest("/", emptyMap(), greaterThan(0));
        assertHeadRequest("/", singletonMap("pretty", ""), greaterThan(0));
        assertHeadRequest("/", singletonMap("pretty", "true"), greaterThan(0));
    }

    /** Indexes the single trivial document used by the exists-style checks below. */
    private void indexTestDocument() throws IOException {
        StringEntity body = new StringEntity("{\"test\": \"test\"}");
        client().performRequest("PUT", "test/test/1", emptyMap(), body);
    }

    public void testDocumentExists() throws IOException {
        indexTestDocument();
        assertHeadRequest("test/test/1", emptyMap(), equalTo(0));
        assertHeadRequest("test/test/1", singletonMap("pretty", "true"), equalTo(0));
    }

    public void testIndexExists() throws IOException {
        indexTestDocument();
        assertHeadRequest("test", emptyMap(), equalTo(0));
        assertHeadRequest("test", singletonMap("pretty", "true"), equalTo(0));
    }

    public void testTypeExists() throws IOException {
        indexTestDocument();
        assertHeadRequest("test/test", emptyMap(), equalTo(0));
        assertHeadRequest("test/test", singletonMap("pretty", "true"), equalTo(0));
    }

    /**
     * Issues a HEAD request and verifies three things: a 200 status, a
     * Content-Length header satisfying {@code lengthMatcher}, and the complete
     * absence of a response body.
     */
    private void assertHeadRequest(String url, Map<String, String> params, Matcher<Integer> lengthMatcher) throws IOException {
        final Response response = client().performRequest("HEAD", url, params);
        final int status = response.getStatusLine().getStatusCode();
        assertEquals(200, status);
        final Integer contentLength = Integer.valueOf(response.getHeader("Content-Length"));
        assertThat(contentLength, lengthMatcher);
        assertNull("HEAD requests shouldn't have a response body but " + url + " did", response.getEntity());
    }
}
| 39.694444
| 116
| 0.709237
|
4deb7c0507faa636b53d1b302728940c84b13cb2
| 5,533
|
/**
* Copyright (C) 2011-2018 Red Hat, Inc. (https://github.com/Commonjava/indy)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.commonjava.indy.promote.ftest.rule;
import org.apache.commons.io.IOUtils;
import org.commonjava.indy.ftest.core.category.EventDependent;
import org.commonjava.indy.model.core.Group;
import org.commonjava.indy.model.core.HostedRepository;
import org.commonjava.indy.promote.model.GroupPromoteRequest;
import org.commonjava.indy.promote.model.GroupPromoteResult;
import org.commonjava.indy.promote.model.ValidationResult;
import org.commonjava.indy.promote.model.ValidationRuleSet;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collections;
import java.util.Map;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
/**
 * Validates the {@code artifact-refs-via} promotion rule when a dependency of
 * the promoted POM is served by a different repository that is a member of the
 * validation group: the POM whose dependency is resolvable through the group
 * must pass validation, while the POM with an unresolvable dependency must be
 * reported as an error.
 * <p>
 * Created by jdcasey on 9/23/15.
 */
public class ArtifactRefs_DependencyInAnotherRepoInGroup_RuleTest
    extends AbstractValidationRuleTest<Group>
{

    private static final String RULE = "artifact-refs-via.groovy";

    private static final String PREFIX = "artifact-refs-via/";

    /** Secondary hosted repository that serves the external dependency. */
    private HostedRepository otherSource;

    /** Group ("other") aggregating the promotion source plus {@link #otherSource}. */
    private Group other;

    @Test
    @Category( EventDependent.class )
    public void run()
        throws Exception
    {
        String invalid = "org/foo/invalid/1/invalid-1.pom";
        String valid = "org/foo/valid/1.1/valid-1.1.pom";
        String validDepPom = "org/bar/dep/1.0/dep-1.0.pom";
        String validDepJar = "org/bar/dep/1.0/dep-1.0.jar";

        String content = "this is some content";

        // Deploy the dependency POM + jar into the secondary repository...
        client.content().store( otherSource.getKey(), validDepPom, new ByteArrayInputStream( content.getBytes() ) );
        client.content().store( otherSource.getKey(), validDepJar, new ByteArrayInputStream( content.getBytes() ) );

        // ...and verify both are retrievable through the "other" group.
        // try-with-resources closes the streams even when IOUtils.toString or
        // an assertion throws (the original leaked the stream on that path).
        String retrieved;
        try (InputStream stream = client.content().get( other.getKey(), validDepPom ))
        {
            retrieved = IOUtils.toString( stream );
        }
        assertThat( validDepPom + " invalid from: " + other.getKey(), retrieved, equalTo( content ) );

        try (InputStream stream = client.content().get( other.getKey(), validDepJar ))
        {
            retrieved = IOUtils.toString( stream );
        }
        assertThat( validDepJar + " invalid from: " + other.getKey(), retrieved, equalTo( content ) );

        // Deploy one POM with a resolvable dependency and one without.
        deployResource( invalid, PREFIX + "invalid-external-dep.pom.xml");
        deployResource( valid, PREFIX + "valid-single-external-dep.pom.xml" );

        waitForEventPropagation();

        GroupPromoteRequest request = new GroupPromoteRequest( source.getKey(), target.getName() );
        GroupPromoteResult result = module.promoteToGroup( request );
        assertThat( result, notNullValue() );

        ValidationResult validations = result.getValidations();
        assertThat( validations, notNullValue() );

        Map<String, String> validatorErrors = validations.getValidatorErrors();
        assertThat( validatorErrors, notNullValue() );
        System.out.println(validatorErrors);

        String errors = validatorErrors.get( RULE );
        assertThat( errors, notNullValue() );

        // The resolvable POM must not be flagged; the unresolvable one must be.
        assertThat( errors.contains( valid ), equalTo( false ) );
        assertThat( errors.contains( invalid ), equalTo( true ) );
    }

    public ArtifactRefs_DependencyInAnotherRepoInGroup_RuleTest()
    {
        super( Group.class );
    }

    @Override
    protected String getRuleScriptFile()
    {
        return RULE;
    }

    @Override
    protected String getRuleScriptContent()
            throws IOException
    {
        String path = "promote/rules/" + RULE;
        return readTestResource( path );
    }

    /** Rule set targeting group:target, with group:other as the lookup scope. */
    @Override
    protected ValidationRuleSet getRuleSet()
    {
        ValidationRuleSet ruleSet = new ValidationRuleSet();
        ruleSet.setName( "test" );
        ruleSet.setStoreKeyPattern( "group:target" );

        ruleSet.setRuleNames( Collections.singletonList( getRuleScriptFile() ) );
        ruleSet.setValidationParameters( Collections.singletonMap( "availableInStores", "group:other" ) );

        return ruleSet;
    }

    /** Creates the secondary repository and the "other" group on top of the base fixture. */
    @Override
    public void start()
            throws Throwable
    {
        super.start();

        otherSource = new HostedRepository( "otherSource" );
        otherSource = client.stores().create( otherSource, "Creating secondary content source", HostedRepository.class );

        other = new Group( "other", source.getKey(), otherSource.getKey() );
        other = client.stores().create( other, "Creating secondary content group", Group.class );

        Logger logger = LoggerFactory.getLogger( getClass() );
        logger.info( "{} contains members: {}", other, other.getConstituents() );
    }
}
| 35.242038
| 121
| 0.694379
|
7ac5697dfb0a9478853ba22bdcc671e5d6f6112b
| 1,464
|
/*******************************************************************************
* Copyright (c) 2008 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM - Initial API and implementation
*******************************************************************************/
package org.eclipse.cdt.internal.core.indexer;
import org.eclipse.cdt.core.parser.IScannerInfo;
import org.eclipse.cdt.core.parser.IScannerInfoProvider;
/**
 * Similar to {@link IScannerInfoProvider} but computes the {@link IScannerInfo}
 * based on a String path instead of an IResource.
 *
 * @see IScannerInfoProvider
 */
public interface IStandaloneScannerInfoProvider {

    /**
     * Returns an IScannerInfo for the given file path,
     * or an empty IScannerInfo object if the file path is invalid.
     *
     * @param path the file path to compute scanner information for
     */
    IScannerInfo getScannerInformation(String path);

    /**
     * Returns an IScannerInfo when you don't necessarily have access to a path.
     *
     * This is used by the "parse up front" feature. Since we are parsing
     * files outside of the project a "default" IScannerInfo object
     * is needed to get the minimal amount of available info in order
     * to parse the file.
     *
     * @param linkageID the linkage the default scanner info should apply to
     */
    IScannerInfo getDefaultScannerInformation(int linkageID);
}
| 34.857143
| 81
| 0.670082
|
3d333668b0b54ccf2486eb88a162e409d56e6869
| 10,167
|
/*
* This file is part of the Heritrix web crawler (crawler.archive.org).
*
* Licensed to the Internet Archive (IA) by one or more individual
* contributors.
*
* The IA licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.archive.modules.fetcher;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.*;
import org.apache.commons.collections.collection.CompositeCollection;
import org.apache.http.client.CookieStore;
import org.apache.http.cookie.Cookie;
import org.archive.bdb.BdbModule;
import org.archive.checkpointing.Checkpoint;
import org.springframework.beans.factory.annotation.Autowired;
import com.google.common.net.InternetDomainName;
import com.sleepycat.bind.ByteArrayBinding;
import com.sleepycat.bind.serial.SerialBinding;
import com.sleepycat.bind.serial.StoredClassCatalog;
import com.sleepycat.collections.StoredCollection;
import com.sleepycat.collections.StoredSortedMap;
import com.sleepycat.je.Database;
import com.sleepycat.je.DatabaseException;
/**
* Cookie store using bdb for storage. Cookies are stored in a SortedMap keyed
* by {@link #sortableKey(Cookie)}, so they are grouped together by domain.
* {@link #cookieStoreFor(String)} returns a facade whose
* {@link CookieStore#getCookies()} returns a list of cookies limited to
* the supplied host and parent domains, if applicable.
*
* @see <a href="https://webarchive.jira.com/browse/HER-2070">https://webarchive.jira.com/browse/HER-2070</a>
* @see <a href="https://github.com/internetarchive/heritrix3/pull/96">https://github.com/internetarchive/heritrix3/pull/96</a>
* @see <a href="https://groups.yahoo.com/neo/groups/archive-crawler/conversations/messages/8620">https://groups.yahoo.com/neo/groups/archive-crawler/conversations/messages/8620</a>
*
* @author nlevitt
*/
public class BdbCookieStore extends AbstractCookieStore implements
        FetchHTTPCookieStore, CookieStore {

    /**
     * A {@link List} implementation that wraps a {@link Collection}. Needed
     * because httpclient requires {@code List<Cookie>}.
     *
     * <p>
     * This class is "restricted" in the sense that it is immutable, and also
     * because some methods throw {@link RuntimeException} for other reasons.
     * For example, {@link #iterator()} is not implemented, because we use this
     * class to wrap a bdb {@link StoredCollection}, and iterators from that
     * class need to be explicitly closed. Since this class hides the fact that
     * a StoredCollection underlies it, we simply prevent {@link #iterator()}
     * from being used.
     */
    public static class RestrictedCollectionWrappedList<T> implements List<T> {
        private Collection<T> wrapped;

        public RestrictedCollectionWrappedList(Collection<T> wrapped) { this.wrapped = wrapped; }

        @Override public int size() { return wrapped.size(); }
        @Override public boolean isEmpty() { throw new RuntimeException("not implemented"); }
        @Override public boolean contains(Object o) { throw new RuntimeException("not implemented"); }
        @Override public Iterator<T> iterator() { throw new RuntimeException("not implemented"); }
        @Override public Object[] toArray() { return wrapped.toArray(); }
        @SuppressWarnings("hiding") @Override public <T> T[] toArray(T[] a) { return wrapped.toArray(a); }
        @Override public boolean add(T e) { throw new RuntimeException("immutable list"); }
        @Override public boolean remove(Object o) { throw new RuntimeException("immutable list"); }
        @Override public boolean containsAll(Collection<?> c) { return wrapped.containsAll(c); }
        @Override public boolean addAll(Collection<? extends T> c) { throw new RuntimeException("immutable list"); }
        @Override public boolean addAll(int index, Collection<? extends T> c) { throw new RuntimeException("immutable list"); }
        @Override public boolean removeAll(Collection<?> c) { throw new RuntimeException("immutable list"); }
        @Override public boolean retainAll(Collection<?> c) { throw new RuntimeException("immutable list"); }
        @Override public void clear() { throw new RuntimeException("immutable list"); }
        @Override public T get(int index) { throw new RuntimeException("not implemented"); }
        @Override public T set(int index, T element) { throw new RuntimeException("immutable list"); }
        @Override public void add(int index, T element) { throw new RuntimeException("immutable list"); }
        @Override public T remove(int index) { throw new RuntimeException("immutable list"); }
        @Override public int indexOf(Object o) { throw new RuntimeException("not implemented"); }
        @Override public int lastIndexOf(Object o) { throw new RuntimeException("not implemented"); }
        @Override public ListIterator<T> listIterator() { throw new RuntimeException("not implemented"); }
        @Override public ListIterator<T> listIterator(int index) { throw new RuntimeException("not implemented"); }
        @Override public List<T> subList(int fromIndex, int toIndex) { throw new RuntimeException("not implemented"); }
    }

    protected BdbModule bdb;

    @Autowired
    public void setBdbModule(BdbModule bdb) {
        this.bdb = bdb;
    }

    /** Name of the bdb database the cookies are stored in. */
    public static String COOKIEDB_NAME = "hc_httpclient_cookies";

    private transient Database cookieDb;
    // Sorted map view of the database: key is sortableKey(cookie) as UTF-8
    // bytes, so entries for the same domain sort adjacently (see hostSubset()).
    private transient StoredSortedMap<byte[],Cookie> cookies;

    /**
     * Opens (or creates) the cookie database and wraps it in the sorted map.
     * The database is non-transactional with no duplicate keys; when this is a
     * checkpoint recovery the previously stored cookie data is reused.
     */
    public void prepare() {
        try {
            StoredClassCatalog classCatalog = bdb.getClassCatalog();
            BdbModule.BdbConfig dbConfig = new BdbModule.BdbConfig();
            dbConfig.setTransactional(false);
            dbConfig.setAllowCreate(true);
            dbConfig.setSortedDuplicates(false);
            cookieDb = bdb.openDatabase(COOKIEDB_NAME, dbConfig,
                    isCheckpointRecovery);
            cookies = new StoredSortedMap<byte[],Cookie>(cookieDb,
                    new ByteArrayBinding(),
                    new SerialBinding<Cookie>(classCatalog, Cookie.class),
                    true);
        } catch (DatabaseException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Stores the cookie under its sortable key. An already-expired cookie is
     * not stored; instead any previously stored entry for that key is removed.
     */
    public void addCookieImpl(Cookie cookie) {
        byte[] key;
        try {
            key = sortableKey(cookie).getBytes("UTF-8");
        } catch (UnsupportedEncodingException e) {
            throw new RuntimeException(e); // impossible
        }

        if (!cookie.isExpired(new Date())) {
            cookies.put(key, cookie);
        } else {
            cookies.remove(key);
        }
    }

    /**
     * Returns a live view of the cookies stored for exactly {@code host}.
     * Keys have the form "host;...", so the sub-map from "host;" (inclusive)
     * to "host" + the character after ';' (exclusive) covers precisely the
     * keys whose host segment equals {@code host}.
     */
    protected Collection<Cookie> hostSubset(String host) {
        try {
            byte[] startKey = (host + ";").getBytes("UTF-8");

            char chAfterDelim = (char)(((int)';')+1);
            byte[] endKey = (host + chAfterDelim).getBytes("UTF-8");

            SortedMap<byte[], Cookie> submap = cookies.subMap(startKey, endKey);
            return submap.values();
        } catch (UnsupportedEncodingException e) {
            throw new RuntimeException(e); // impossible
        }
    }

    /**
     * Returns a {@link LimitedCookieStoreFacade} whose
     * {@link LimitedCookieStoreFacade#getCookies()} method returns only cookies
     * from {@code host} and its parent domains, if applicable.
     */
    public CookieStore cookieStoreFor(String host) {
        CompositeCollection cookieCollection = new CompositeCollection();

        if (InternetDomainName.isValid(host)) {
            // Collect cookie subsets for the host and each of its parent domains.
            InternetDomainName domain = InternetDomainName.from(host);

            while (domain != null) {
                Collection<Cookie> subset = hostSubset(domain.toString());
                cookieCollection.addComposited(subset);

                if (domain.hasParent()) {
                    domain = domain.parent();
                } else {
                    domain = null;
                }
            }
        } else {
            // Not a valid domain name (e.g. an IP literal): exact-host match only.
            Collection<Cookie> subset = hostSubset(host.toString());
            cookieCollection.addComposited(subset);
        }

        // Snapshot the composite (bdb-backed) view into a plain list;
        // CompositeCollection is raw, hence the unchecked suppression.
        @SuppressWarnings("unchecked")
        List<Cookie> cookieList = new ArrayList<>(cookieCollection);

        LimitedCookieStoreFacade store = new LimitedCookieStoreFacade(cookieList);
        return store;
    }

    @Override
    public void startCheckpoint(Checkpoint checkpointInProgress) {
        // do nothing; handled by map checkpoint via BdbModule
    }

    @Override
    public void doCheckpoint(Checkpoint checkpointInProgress)
            throws IOException {
        // do nothing; handled by map checkpoint via BdbModule
    }

    @Override
    public void finishCheckpoint(Checkpoint checkpointInProgress) {
        // do nothing; handled by map checkpoint via BdbModule
    }

    /** are we a checkpoint recovery? (in which case, reuse stored cookie data?) */
    protected boolean isCheckpointRecovery = false;

    @Override
    public void setRecoveryCheckpoint(Checkpoint recoveryCheckpoint) {
        // just remember that we are doing checkpoint-recovery;
        // actual state recovery happens via BdbModule
        isCheckpointRecovery = true;
    }

    @Override
    public void clear() {
        cookies.clear();
    }

    /**
     * @return an immutable list view of the cookies, or null if prepare()
     *         has not yet opened the database
     */
    @Override
    public List<Cookie> getCookies() {
        if (cookies != null) {
            return new RestrictedCollectionWrappedList<Cookie>(cookies.values());
        } else {
            return null;
        }
    }

    @Override
    public boolean clearExpired(Date date) {
        throw new RuntimeException("not implemented");
    }
}
| 43.448718
| 181
| 0.671781
|
a7c4a6fd4afebc4837d0cca9cfa774bb2b34da0b
| 833
|
/**
* This class was created by <Vazkii>. It's distributed as
* part of the Botania Mod. Get the Source Code in github:
* https://github.com/Vazkii/Botania
* <p>
* Botania is Open Source and distributed under the
* Botania License: http://botaniamod.net/license.php
*/
package reborncore.client.multiblock;
import net.minecraft.block.Block;
import net.minecraft.client.renderer.BlockRendererDispatcher;
import net.minecraft.world.IBlockAccess;
import java.util.HashMap;
import java.util.Map;
/**
 * A hook for rendering blocks in the multiblock display.
 */
public interface IMultiblockRenderHook {

    /** Registry mapping a block to the hook used to render it in the multiblock display. */
    Map<Block, IMultiblockRenderHook> renderHooks = new HashMap<>();

    /**
     * Renders {@code block} (with metadata {@code meta}) as part of the
     * multiblock {@code mb}.
     */
    void renderBlockForMultiblock(IBlockAccess world, Multiblock mb, Block block, int meta, BlockRendererDispatcher renderBlocks);
}
| 29.75
| 137
| 0.771909
|
1f94dcfd0a1751794c1fb883fe23b76ed3be1a0e
| 1,080
|
package com.easyfilepicker;
/**
 * Shared constants for the file picker.
 *
 * Created by gurmail on 28/01/19.
 *
 * @author gurmail
 */
public class AppConstant {

    /** MIME type strings for the document formats the picker recognizes. */
    interface MimeTypeConstants {
        String MIME_TYPE_IMAGE_JPEG = "image/jpeg";
        // NOTE(review): "image/pjpeg" is the legacy IE "progressive JPEG" type,
        // not the standard type for .jpg files ("image/jpeg") — confirm intent.
        String MIME_TYPE_IMAGE_JPG = "image/pjpeg";
        String MIME_TYPE_IMAGE_PNG = "image/png";
        String MIME_TYPE_PDF = "application/pdf";
        // Two MIME variants commonly reported for CSV files.
        String MIME_TYPE_CSV_1 = "text/csv";
        String MIME_TYPE_CSV_2 = "text/comma-separated-values";
        // Legacy and OOXML Microsoft Office formats.
        String MIME_TYPE_DOC = "application/msword";
        String MIME_TYPE_DOCX = "application/vnd.openxmlformats-officedocument.wordprocessingml.document";
        String MIME_TYPE_PPT = "application/vnd.ms-powerpoint";
        String MIME_TYPE_PPTX = "application/vnd.openxmlformats-officedocument.presentationml.presentation";
        String MIME_TYPE_XLS = "application/vnd.ms-excel";
        String MIME_TYPE_XLSX = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet";
        String MIME_TYPE_TXT = "text/plain";
    }
}
| 41.538462
| 114
| 0.667593
|
c139392d59beee52c7593a4091418e390941d290
| 1,563
|
package org.geekbang.thinking.in.spring.bean.lifecycle;
import org.geekbang.thinking.in.spring.ioc.overview.domain.SuperUser;
import org.geekbang.thinking.in.spring.ioc.overview.domain.User;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.support.DefaultListableBeanFactory;
import org.springframework.beans.factory.xml.XmlBeanDefinitionReader;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.Resource;
import org.springframework.core.io.support.EncodedResource;
/**
 * {@link BeanDefinition} merge demo: loads definitions from XML and looks up
 * the "user" and "superUser" beans.
 *
 * @author ajin
 */
public class MergedBeanDefinitionDemo {

    public static void main(String[] args) {
        // The low-level IoC container.
        final DefaultListableBeanFactory factory = new DefaultListableBeanFactory();

        // XML-based reader that registers bean definitions into the factory.
        final XmlBeanDefinitionReader reader = new XmlBeanDefinitionReader(factory);

        // Load the classpath XML resource, decoded as UTF-8.
        final Resource xml = new ClassPathResource("META-INF/dependency-lookup.xml");
        final EncodedResource encodedXml = new EncodedResource(xml, "UTF-8");
        final int loaded = reader.loadBeanDefinitions(encodedXml);
        System.out.printf("已加载的BeanDefinition数量:%s\n", loaded);

        // Look up and print both beans.
        final SuperUser superUser = factory.getBean("superUser", SuperUser.class);
        System.out.println(superUser);

        final User user = factory.getBean("user", User.class);
        System.out.println(user);
    }
}
| 34.733333
| 96
| 0.756238
|
72f451f880e281b7121fe142d2a848424e2a4657
| 9,384
|
/* Copyright Statement:
*
* This software/firmware and related documentation ("MediaTek Software") are
* protected under relevant copyright laws. The information contained herein
* is confidential and proprietary to MediaTek Inc. and/or its licensors.
* Without the prior written permission of MediaTek inc. and/or its licensors,
* any reproduction, modification, use or disclosure of MediaTek Software,
* and information contained herein, in whole or in part, shall be strictly prohibited.
*/
/* MediaTek Inc. (C) 2010. All rights reserved.
*
* BY OPENING THIS FILE, RECEIVER HEREBY UNEQUIVOCALLY ACKNOWLEDGES AND AGREES
* THAT THE SOFTWARE/FIRMWARE AND ITS DOCUMENTATIONS ("MEDIATEK SOFTWARE")
* RECEIVED FROM MEDIATEK AND/OR ITS REPRESENTATIVES ARE PROVIDED TO RECEIVER ON
* AN "AS-IS" BASIS ONLY. MEDIATEK EXPRESSLY DISCLAIMS ANY AND ALL WARRANTIES,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE OR NONINFRINGEMENT.
* NEITHER DOES MEDIATEK PROVIDE ANY WARRANTY WHATSOEVER WITH RESPECT TO THE
* SOFTWARE OF ANY THIRD PARTY WHICH MAY BE USED BY, INCORPORATED IN, OR
* SUPPLIED WITH THE MEDIATEK SOFTWARE, AND RECEIVER AGREES TO LOOK ONLY TO SUCH
* THIRD PARTY FOR ANY WARRANTY CLAIM RELATING THERETO. RECEIVER EXPRESSLY ACKNOWLEDGES
* THAT IT IS RECEIVER'S SOLE RESPONSIBILITY TO OBTAIN FROM ANY THIRD PARTY ALL PROPER LICENSES
* CONTAINED IN MEDIATEK SOFTWARE. MEDIATEK SHALL ALSO NOT BE RESPONSIBLE FOR ANY MEDIATEK
* SOFTWARE RELEASES MADE TO RECEIVER'S SPECIFICATION OR TO CONFORM TO A PARTICULAR
* STANDARD OR OPEN FORUM. RECEIVER'S SOLE AND EXCLUSIVE REMEDY AND MEDIATEK'S ENTIRE AND
* CUMULATIVE LIABILITY WITH RESPECT TO THE MEDIATEK SOFTWARE RELEASED HEREUNDER WILL BE,
* AT MEDIATEK'S OPTION, TO REVISE OR REPLACE THE MEDIATEK SOFTWARE AT ISSUE,
* OR REFUND ANY SOFTWARE LICENSE FEES OR SERVICE CHARGE PAID BY RECEIVER TO
* MEDIATEK FOR SUCH MEDIATEK SOFTWARE AT ISSUE.
*
* The following software/firmware and/or related documentation ("MediaTek Software")
* have been modified by MediaTek Inc. All revisions are subject to any receiver's
* applicable license agreements with MediaTek Inc.
*/
/*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.stk;
import com.android.internal.telephony.cat.CatLog;
import com.android.internal.telephony.Phone;
import com.android.internal.telephony.PhoneConstants;
import android.content.ComponentName;
import android.content.Context;
import android.content.pm.PackageManager;
/**
 * Application installer for SIM Toolkit.
 *
 * "Installing" here means enabling the per-SIM STK launcher activity
 * component via {@link PackageManager#setComponentEnabledSetting}; each
 * install/uninstall request runs on a freshly started background thread.
 */
class StkAppInstaller {
    // Context used by the worker threads; overwritten on every install/unInstall call.
    Context mContext;

    // Eagerly created singleton; getInstance() (re)initializes the per-SIM
    // worker runnables before returning it.
    private static StkAppInstaller mInstance = new StkAppInstaller();

    private StkAppInstaller() {
    }

    public static StkAppInstaller getInstance(){
        if (mInstance != null)
        {
            mInstance.initThread();
        }
        return mInstance;
    }

    // Lazily creates one install and one uninstall runnable per SIM slot, and
    // resets the cached install state to "unknown" (-1) for new install slots.
    private void initThread()
    {
        int i = 0;

        for (i = 0; i < StkAppService.STK_GEMINI_SIM_NUM; i++)
        {
            if (installThread[i] == null)
            {
                CatLog.d("StkAppInstaller", "Init thread");
                installThread[i] = new InstallThread();
                miSTKInstalled[i] = -1;
            }
            if (uninstallThread[i] == null) uninstallThread[i] = new UnInstallThread();
        }
    }

    public static final int STK_NOT_INSTALLED = 1;
    public static final int STK_INSTALLED = 2;

    //private static int miSTKInstalled = -1; // 1 -not_ready, 2-ready
    // Cached per-SIM component state, to skip redundant PackageManager calls.
    private static int[] miSTKInstalled = new int[StkAppService.STK_GEMINI_SIM_NUM]; // 1 -not_ready, 2-ready

    /* TODO: Gemini+ */
    private static final String STK1_LAUNCHER_ACTIVITY = "com.android.stk.StkLauncherActivity";
    private static final String STK2_LAUNCHER_ACTIVITY = "com.android.stk.StkLauncherActivityII";
    private static final String STK3_LAUNCHER_ACTIVITY = "com.android.stk.StkLauncherActivityIII";
    private static final String STK4_LAUNCHER_ACTIVITY = "com.android.stk.StkLauncherActivityIV";

    // NOTE(review): setSim(...) followed by start() reuses one shared Runnable
    // per SIM, and mContext is a shared field — concurrent install/unInstall
    // calls for the same SIM could race; confirm callers serialize requests.
    void install(Context context, int sim_id) {
        if (installThread[sim_id] != null)
        {
            mContext = context;
            installThread[sim_id].setSim(sim_id);
            new Thread(installThread[sim_id]).start();
        }
    }

    // See the concurrency note on install() above; same pattern applies here.
    void unInstall(Context context, int sim_id) {
        if (uninstallThread[sim_id] != null)
        {
            mContext = context;
            uninstallThread[sim_id].setSim(sim_id);
            new Thread(uninstallThread[sim_id]).start();
        }
    }

    /**
     * Enables (install == true) or disables the launcher activity component
     * for the given SIM, skipping the PackageManager call when the cached
     * state already matches. DONT_KILL_APP keeps the app process alive.
     */
    private static void setAppState(Context context, boolean install, int sim_id) {
        CatLog.d("StkAppInstaller", "[setAppState]+");
        if (context == null) {
            return;
        }
        PackageManager pm = context.getPackageManager();
        if (pm == null) {
            return;
        }
        // check that STK app package is known to the PackageManager
        /* TODO: Gemini+ begin */
        // Map the SIM slot to its dedicated launcher activity class.
        String class_name = STK1_LAUNCHER_ACTIVITY;
        switch (sim_id)
        {
            case PhoneConstants.GEMINI_SIM_1:
                class_name = STK1_LAUNCHER_ACTIVITY;
                break;
            case PhoneConstants.GEMINI_SIM_2:
                class_name = STK2_LAUNCHER_ACTIVITY;
                break;
            case PhoneConstants.GEMINI_SIM_3:
                class_name = STK3_LAUNCHER_ACTIVITY;
                break;
            case PhoneConstants.GEMINI_SIM_4:
                class_name = STK4_LAUNCHER_ACTIVITY;
                break;
            default:
                CatLog.d("StkAppInstaller", "setAppState, ready to return because sim id " + sim_id +" is wrong.");
                return;
        }
        /* TODO: Gemini+ end */
        CatLog.d("StkAppInstaller", "setAppState, target class name: " + class_name);
        ComponentName cName = new ComponentName("com.android.stk", class_name);
        ComponentName cNameMenu = new ComponentName("com.android.stk",
                "com.android.stk.StkMenuActivity");
        int state = install ? PackageManager.COMPONENT_ENABLED_STATE_ENABLED
                : PackageManager.COMPONENT_ENABLED_STATE_DISABLED;

        CatLog.d("StkAppInstaller", "Stk1 - setAppState - curState[" + miSTKInstalled[sim_id] + "] to state[" + install + "]" );
        // Skip the PackageManager round-trip when the cached state already matches.
        if (((PackageManager.COMPONENT_ENABLED_STATE_ENABLED == state) && (STK_INSTALLED == miSTKInstalled[sim_id])) ||
            ((PackageManager.COMPONENT_ENABLED_STATE_DISABLED == state) && (STK_NOT_INSTALLED == miSTKInstalled[sim_id])))
        {
            CatLog.d("StkAppInstaller", "Stk " + sim_id + " - Need not change app state!!");
        } else {
            CatLog.d("StkAppInstaller", "Stk " + sim_id + "- StkAppInstaller - Change app state[" + install + "]");
            miSTKInstalled[sim_id] = install ? STK_INSTALLED : STK_NOT_INSTALLED;
            try {
                pm.setComponentEnabledSetting(cName, state, PackageManager.DONT_KILL_APP);
                // pm.setComponentEnabledSetting(cNameMenu, state, PackageManager.DONT_KILL_APP);
            } catch (Exception e) {
                CatLog.d("StkAppInstaller", "Could not change STK1 app state");
            }
        }
        CatLog.d("StkAppInstaller", "[setAppState]-");
    }

    // Worker that enables the launcher component for the SIM set via setSim().
    private class InstallThread implements Runnable{
        private int mSimId = -1;
        @Override
        public void run(){
            CatLog.d("StkAppInstaller", "InstallThread, run , sim id: " + mSimId);
            if (mSimId >= 0 && mSimId < StkAppService.STK_GEMINI_SIM_NUM)
                setAppState(mContext, true, mSimId);
        }
        public void setSim(int sim_id)
        {
            CatLog.d("StkAppInstaller", "InstallThread, sim id: " + sim_id);
            mSimId = sim_id;
        }
    }

    // Worker that disables the launcher component for the SIM set via setSim().
    private class UnInstallThread implements Runnable{
        private int mSimId = -1;
        @Override
        public void run(){
            CatLog.d("StkAppInstaller", "UninstallThread, run , sim id: " + mSimId);
            if (mSimId >= 0 && mSimId < StkAppService.STK_GEMINI_SIM_NUM)
                setAppState(mContext, false, mSimId);
        }
        public void setSim(int sim_id)
        {
            CatLog.d("StkAppInstaller", "UninstallThread, sim id: " + sim_id);
            mSimId = sim_id;
        }
    }

    private InstallThread[] installThread = new InstallThread[StkAppService.STK_GEMINI_SIM_NUM];
    private UnInstallThread[] uninstallThread = new UnInstallThread[StkAppService.STK_GEMINI_SIM_NUM];

    /** Returns the cached install state for the SIM: -1 unknown, 1 not installed, 2 installed. */
    public static int getIsInstalled(int sim_id) {
        CatLog.d("StkAppInstaller", "getIsInstalled, sim id: " + sim_id + ", install status: " + miSTKInstalled[sim_id]);
        return miSTKInstalled[sim_id];
    }
}
| 42.654545
| 128
| 0.663257
|
96cbd19d80039cc95beac1c1dfa5a596f649bc0a
| 1,888
|
package com.journaldev.mongodb.servlets;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.List;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.json.JSONException;
import org.json.JSONObject;
import com.journaldev.mongodb.dao.MongoDBDataDAO;
import com.journaldev.mongodb.dao.MongoDBUsersDAO;
import com.mongodb.MongoClient;
/**
* Servlet implementation class GetUserInteractionServlet
*/
/**
 * Servlet returning, as JSON, the stored interactions for one user.
 *
 * <p>Expects a {@code user_name} request parameter. Responds 404 when the
 * user does not exist; otherwise writes {"interactions": "..."}.
 */
@WebServlet("/user_interactions")
public class GetUserInteractionServlet extends HttpServlet {
	private static final long serialVersionUID = 1L;

	/**
	 * Handles GET: looks up the user, then serializes that user's
	 * interaction list into a single JSON object.
	 *
	 * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
	 */
	protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
		String user_name = request.getParameter("user_name");

		// Shared Mongo client created once at startup and stashed in the context.
		MongoClient mongo = (MongoClient) request.getServletContext()
				.getAttribute("MONGO_CLIENT");
		MongoDBDataDAO dataDAO = new MongoDBDataDAO(mongo);
		MongoDBUsersDAO userDAO = new MongoDBUsersDAO(mongo);

		// Unknown user -> 404, nothing else written to the response.
		if (userDAO.getUserByName(user_name) == null) {
			response.sendError(HttpServletResponse.SC_NOT_FOUND);
			return;
		}

		List<String> user_interactions = dataDAO.getAllData(user_name);

		response.setContentType("application/json");
		// BUGFIX: "nocache" is not a valid Cache-Control directive (RFC 7234);
		// the correct token is "no-cache".
		response.setHeader("Cache-Control", "no-cache");
		response.setCharacterEncoding("utf-8");

		PrintWriter printout = response.getWriter();
		JSONObject JObject = new JSONObject();
		try {
			JObject.put("interactions", user_interactions.toString());
		} catch (JSONException excep) {
			// put() with a non-null key should never throw; if it somehow does,
			// report it with its message instead of a bare marker string.
			System.err.println("Failed to build JSON response: " + excep);
		}
		printout.print(JObject);
		printout.flush();
	}
}
| 29.968254
| 118
| 0.766949
|
0c2ca4ca88ca759af4c62dbc3ae1d273f0fec240
| 997
|
package com.tgb.lk.demo.gson.test6;
import java.lang.reflect.Type;
import com.google.gson.JsonDeserializationContext;
import com.google.gson.JsonDeserializer;
import com.google.gson.JsonElement;
import com.google.gson.JsonParseException;
import com.google.gson.JsonPrimitive;
import com.google.gson.JsonSerializationContext;
import com.google.gson.JsonSerializer;
/**
 * Gson adapter that (de)serializes {@link PackageState} as its ordinal
 * number instead of its name.
 */
public class EnumSerializer implements JsonSerializer<PackageState>,
		JsonDeserializer<PackageState> {

	// Called when converting an object to JSON (JsonSerializer<PackageState>).
	@Override
	public JsonElement serialize(PackageState state, Type arg1,
			JsonSerializationContext arg2) {
		return new JsonPrimitive(state.ordinal());
	}

	// Called when converting JSON back to an object (JsonDeserializer<PackageState>).
	// Returns null for any ordinal outside the enum's range.
	@Override
	public PackageState deserialize(JsonElement json, Type typeOfT,
			JsonDeserializationContext context) throws JsonParseException {
		int ordinal = json.getAsInt();
		// BUGFIX: also reject negative ordinals; the original only checked the
		// upper bound and threw ArrayIndexOutOfBoundsException on e.g. -1.
		if (ordinal >= 0 && ordinal < PackageState.values().length)
			return PackageState.values()[ordinal];
		return null;
	}
}
| 30.212121
| 68
| 0.808425
|
8be40fe0bb7d44c44bccf7230fdd15194fcde33e
| 1,208
|
/**
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2017 the original author or authors.
*/
package org.assertj.core.error;
import java.util.concurrent.atomic.AtomicStampedReference;
/**
 * Creates the error message used when an {@code AtomicStampedReference}
 * does not hold the expected stamp value.
 */
public class ShouldHaveStamp extends BasicErrorMessageFactory {
// Format: actual reference, expected stamp, actual stamp (in that order).
private static final String SHOULD_HAVE_STAMP = "%nExpecting%n <%s>%nto have stamp:%n <%s>%nbut had:%n <%s>";
private ShouldHaveStamp(AtomicStampedReference<?> actual, int expectedStamp) {
super(SHOULD_HAVE_STAMP, actual, expectedStamp, actual.getStamp());
}
/**
 * Static factory entry point.
 *
 * @param actual the reference under assertion (its current stamp is read here)
 * @param expectedStamp the stamp the assertion expected
 * @return a factory producing the "should have stamp" error message
 */
public static ErrorMessageFactory shouldHaveStamp(AtomicStampedReference<?> actual, int expectedStamp) {
return new ShouldHaveStamp(actual, expectedStamp);
}
}
| 41.655172
| 118
| 0.764901
|
b7b383ed5efbd09f48f3498fcc3ce7f22b0944db
| 1,473
|
/*
* Copyright 2021 ACC Cyfronet AGH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package pl.cyfronet.s4e.sync;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import pl.cyfronet.s4e.sync.context.Context;
import pl.cyfronet.s4e.sync.step.Step;
import java.util.List;
@RequiredArgsConstructor
@Slf4j
public class SceneAcceptorImpl implements SceneAcceptor {
private final PipelineFactory pipelineFactory;
@Override
public Error accept(Context context) {
Error result = null;
try {
List<Step<Context, Error>> pipeline = pipelineFactory.build();
for (Step<Context, Error> step : pipeline) {
result = step.apply(context);
if (result != null) {
break;
}
}
} catch (RuntimeException e) {
result = context.getError().cause(e).build();
}
return result;
}
}
| 30.061224
| 75
| 0.66463
|
fa0b7a1600ad7f3727e7b1c7ba15479a77f22dc8
| 1,531
|
package edu.utd.minecraft.mod.polycraft.privateproperty.network;
import java.nio.charset.StandardCharsets;
import java.util.List;
import io.netty.buffer.ByteBuf;
import net.minecraft.util.BlockPos;
import net.minecraftforge.fml.common.network.simpleimpl.IMessage;
/**
 * Network message carrying a teleport request: a target block position,
 * look angles (yaw/pitch), and a free-form argument string.
 *
 * <p>Wire format (must match between toBytes and fromBytes):
 * UTF-8 byte count (int), UTF-8 bytes of {@code args}, x/y/z as ints,
 * then yaw and pitch as floats.
 */
public class TeleportMessage implements IMessage{
	public BlockPos targetPos;
	public float yaw, pitch;
	public String args;

	/** No-arg constructor required by the network framework for decoding. */
	public TeleportMessage()
	{
	}

	/**
	 * Convenience constructor: {@code params} must be exactly
	 * [String args, BlockPos target]; any other size leaves fields unset.
	 */
	public TeleportMessage(List<Object> params)
	{
		if (params.size() == 2) {
			this.args = (String)params.get(0);
			this.targetPos = (BlockPos)params.get(1);
		}
	}

	public TeleportMessage(BlockPos targetPos, String args, float yaw, float pitch)
	{
		this.args = args;
		this.targetPos = targetPos;
		this.yaw = yaw;
		this.pitch = pitch;
	}

	@Override
	public void fromBytes(ByteBuf buf)
	{
		args = StandardCharsets.UTF_8.decode(buf.readBytes(buf.readInt()).nioBuffer()).toString();
		targetPos = new BlockPos(buf.readInt(), buf.readInt(), buf.readInt());
		yaw = buf.readFloat();
		pitch = buf.readFloat();
	}

	@Override
	public void toBytes(ByteBuf buf)
	{
		// BUGFIX: the length prefix must be the UTF-8 *byte* count, not the
		// character count, and the bytes must be explicitly UTF-8 encoded.
		// The old code wrote args.length() plus platform-default-charset
		// bytes, which desynchronized fromBytes() for any non-ASCII string.
		byte[] encodedArgs = args.getBytes(StandardCharsets.UTF_8);
		buf.writeInt(encodedArgs.length);
		buf.writeBytes(encodedArgs);
		buf.writeInt(targetPos.getX());
		buf.writeInt(targetPos.getY());
		buf.writeInt(targetPos.getZ());
		buf.writeFloat(yaw);
		buf.writeFloat(pitch);
	}
}
| 26.396552
| 96
| 0.621163
|
18f5de9c1302b0787a59c9493850018fb752f8a3
| 222
|
package test.helloworld.web.service;
import java.util.List;
import test.helloworld.web.bean.UploadInfo;
/**
 * Persistence operations for {@link UploadInfo} records.
 */
public interface UploadInfoService {
/**
 * Stores a new upload record.
 *
 * @param record the record to persist
 * @return an int result -- presumably the affected row count; confirm
 *         against the implementing mapper
 */
int addRecord(UploadInfo record);
/**
 * Loads stored upload records.
 */
List<UploadInfo> selectRecord();
}
| 18.5
| 43
| 0.774775
|
1e4ba4b99f6f099841061cdb048c3652bb653f04
| 360
|
package cetus.hir;
import java.util.ArrayList;
import java.util.List;
/** This class is not supported */
public class UsingDirective extends Declaration {
    // A using-directive declares no identifiers of its own, so this returns
    // the shared empty list inherited from the parent class. The cast is
    // safe because the list is always empty.
    @SuppressWarnings("unchecked")
    public List<IDExpression> getDeclaredIDs() {
        return (List<IDExpression>) empty_list;
    }
    // Rendered as nothing in output, matching "not supported" status.
    public String toString() {
        return "";
    }
}
| 18.947368
| 49
| 0.669444
|
6177f2e3ce5410e189b3b60a2a3ec637cfe2abee
| 2,130
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.tools.ant.types.resources;
import org.apache.tools.ant.BuildException;
/**
* ResourceCollection that imposes a size limit on another ResourceCollection.
* @since Ant 1.7.1
*/
public abstract class SizeLimitCollection extends BaseResourceCollectionWrapper {
    private static final String BAD_COUNT
        = "size-limited collection count should be set to an int >= 0";
    // Maximum number of resources to expose; defaults to 1.
    private int count = 1;
    /**
     * Set the number of resources to be included.
     * @param i the count as <code>int</code>.
     */
    public synchronized void setCount(int i) {
        checkAttributesAllowed();
        count = i;
    }
    /**
     * Get the number of resources to be included. Default is 1.
     * @return the count as <code>int</code>.
     */
    public synchronized int getCount() {
        return count;
    }
    /**
     * Efficient size implementation: the wrapped collection's size, capped
     * at the configured (validated) count.
     * @return int size
     */
    @Override
    public synchronized int size() {
        return Math.min(getResourceCollection().size(), getValidCount());
    }
    /**
     * Get the count, verifying it is &gt;= 0.
     * @return int count
     * @throws BuildException if a negative count was configured
     */
    protected int getValidCount() {
        int ct = getCount();
        if (ct < 0) {
            throw new BuildException(BAD_COUNT);
        }
        return ct;
    }
}
| 30
| 81
| 0.661972
|
c60cbf16092383fa3bef119f13851917ac288d71
| 6,096
|
package org.recap.report;
import org.apache.camel.ProducerTemplate;
import org.apache.commons.lang3.StringUtils;
import org.recap.ScsbCommonConstants;
import org.recap.ScsbConstants;
import org.recap.model.csv.AccessionSummaryRecord;
import org.recap.model.csv.SolrExceptionReportCSVRecord;
import org.recap.model.csv.SubmitCollectionReportRecord;
import org.recap.model.jpa.ReportEntity;
import org.recap.util.AccessionSummaryRecordGenerator;
import org.recap.util.SolrExceptionCSVRecordGenerator;
import org.recap.util.SubmitCollectionReportGenerator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StopWatch;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Objects;
import java.util.function.Predicate;
/**
 * Shared base for SCSB report generators: converts lists of ReportEntity
 * rows into CSV record lists, sends them to a Camel queue together with a
 * date-stamped file name, and returns that generated name to the caller.
 */
public class CommonReportGenerator {
    private static final Logger logger = LoggerFactory.getLogger(CommonReportGenerator.class);
    @Autowired
    private ProducerTemplate producerTemplate;
    /**
     * Builds submit-collection report records from the report entities and
     * sends them to {@code reportQueue}.
     *
     * <p>When {@code fileName} contains the protected/not-protected keyword
     * it is treated as a '/'-separated path of at least 5 parts and the
     * generated name is rebuilt under SUBMIT_COLLECTION_REPORTS_BASE_PATH;
     * otherwise a date-stamped ".csv" suffix is simply appended.
     *
     * @return the generated (date-stamped) CSV file name
     */
    public String generateSubmitCollectionReportFile(String fileName, List<ReportEntity> reportEntityList, String reportQueue) {
        String generatedFileName;
        String[] fileNameSplit = null;
        List<SubmitCollectionReportRecord> submitCollectionReportRecordList = new ArrayList<>();
        SubmitCollectionReportGenerator submitCollectionReportGenerator = new SubmitCollectionReportGenerator();
        for (ReportEntity reportEntity : reportEntityList) {
            List<SubmitCollectionReportRecord> submitCollectionReportRecords = submitCollectionReportGenerator.prepareSubmitCollectionRejectionRecord(reportEntity);
            submitCollectionReportRecordList.addAll(submitCollectionReportRecords);
        }
        // SimpleDateFormat is not thread-safe; a fresh instance per call is deliberate.
        DateFormat df = new SimpleDateFormat(ScsbConstants.DATE_FORMAT_FOR_FILE_NAME);
        Predicate<String> checkForProtectionOrNotProtectionKeyword = p-> p.contains(ScsbConstants.PROTECTED) || p.contains(ScsbConstants.NOT_PROTECTED);
        if (checkForProtectionOrNotProtectionKeyword.test(fileName)) {
            // Path is assumed to have at least 5 '/'-separated parts; indices
            // 2..4 are reused below -- TODO confirm the expected layout with callers.
            fileNameSplit = fileName.split("/", 5);
            generatedFileName = ScsbConstants.SUBMIT_COLLECTION_REPORTS_BASE_PATH + fileNameSplit[2] + ScsbCommonConstants.PATH_SEPARATOR +fileNameSplit[3] + ScsbCommonConstants.PATH_SEPARATOR+ ScsbCommonConstants.SUBMIT_COLLECTION_REPORT + "-" + fileNameSplit[4] + "-" + df.format(new Date()) + ".csv";
        } else {
            generatedFileName = fileName + "-" + df.format(new Date()) + ".csv";
        }
        // For "summary" queues the rebuilt name replaces the original header value.
        if (StringUtils.containsIgnoreCase(reportQueue, ScsbConstants.SUBMIT_COLLECTION_SUMMARY_Q_SUFFIX)) {
            fileName = generatedFileName;
        }
        if (checkForProtectionOrNotProtectionKeyword.test(fileName) && Objects.requireNonNull(fileNameSplit)[2] != null && Objects.requireNonNull(fileNameSplit)[3] != null && Objects.requireNonNull(fileNameSplit)[4] != null) {
            producerTemplate.sendBodyAndHeader(reportQueue, submitCollectionReportRecordList, ScsbConstants.FILE_NAME, fileNameSplit[2] + ScsbCommonConstants.PATH_SEPARATOR + fileNameSplit[3] + ScsbCommonConstants.PATH_SEPARATOR + ScsbCommonConstants.SUBMIT_COLLECTION_REPORT + "-" + fileNameSplit[4] + "-" + df.format(new Date()) + ".csv");
        } else {
            producerTemplate.sendBodyAndHeader(reportQueue, submitCollectionReportRecordList, ScsbConstants.FILE_NAME, fileName);
        }
        return generatedFileName;
    }
    /**
     * Builds accession-summary records from the report entities, sends them
     * to {@code reportQueue}, and returns the date-stamped CSV file name.
     */
    public String generateAccessionReportFile(String fileName, List<ReportEntity> reportEntityList, String reportQueue) {
        String generatedFileName;
        List<AccessionSummaryRecord> accessionSummaryRecordList;
        AccessionSummaryRecordGenerator accessionSummaryRecordGenerator = new AccessionSummaryRecordGenerator();
        accessionSummaryRecordList = accessionSummaryRecordGenerator.prepareAccessionSummaryReportRecord(reportEntityList);
        producerTemplate.sendBodyAndHeader(reportQueue, accessionSummaryRecordList, ScsbConstants.FILE_NAME, fileName);
        DateFormat df = new SimpleDateFormat(ScsbCommonConstants.DATE_FORMAT_FOR_FILE_NAME);
        generatedFileName = fileName + "-" + df.format(new Date()) + ".csv";
        return generatedFileName;
    }
    /**
     * Builds Solr-exception CSV records and, when any exist, sends them to
     * {@code queueName}.
     *
     * @return the date-stamped CSV file name, or null when there were no records
     */
    public String generateReportForSolrExceptionCsvRecords(String fileName, String queueName, List<ReportEntity> reportEntityList) {
        List<SolrExceptionReportCSVRecord> solrExceptionReportCSVRecords = getSolrExceptionReportReCAPCSVRecords(reportEntityList);
        logger.info("Total Num of CSVRecords Prepared : {} ", solrExceptionReportCSVRecords.size());
        if (!CollectionUtils.isEmpty(solrExceptionReportCSVRecords)) {
            producerTemplate.sendBodyAndHeader(queueName, solrExceptionReportCSVRecords, ScsbCommonConstants.REPORT_FILE_NAME, fileName);
            DateFormat df = new SimpleDateFormat(ScsbConstants.DATE_FORMAT_FOR_FILE_NAME);
            return fileName + "-" + df.format(new Date()) + ".csv";
        }
        return null;
    }
    /**
     * Maps every ReportEntity to a SolrExceptionReportCSVRecord, logging the
     * total conversion time.
     */
    public List<SolrExceptionReportCSVRecord> getSolrExceptionReportReCAPCSVRecords(List<ReportEntity> reportEntityList) {
        StopWatch stopWatch = new StopWatch();
        stopWatch.start();
        List<SolrExceptionReportCSVRecord> solrExceptionReportCSVRecords = new ArrayList<>();
        SolrExceptionCSVRecordGenerator solrExceptionCSVRecordGenerator = new SolrExceptionCSVRecordGenerator();
        for (ReportEntity reportEntity : reportEntityList) {
            SolrExceptionReportCSVRecord solrExceptionReportCSVRecord = solrExceptionCSVRecordGenerator.prepareSolrExceptionReportCSVRecord(reportEntity, new SolrExceptionReportCSVRecord());
            solrExceptionReportCSVRecords.add(solrExceptionReportCSVRecord);
        }
        stopWatch.stop();
        logger.info("Total time taken to prepare CSVRecords : {} ", stopWatch.getTotalTimeSeconds());
        return solrExceptionReportCSVRecords;
    }
}
| 58.057143
| 341
| 0.772146
|
f7e8fb1c8aa972a57077a07b5f8dbc53506dd2b9
| 1,347
|
package ru.otus.io;
import java.util.Arrays;
import java.util.Objects;
public class BagOfPrimitivesAndArray{
private final int value1;
private final String value2;
private final int value3;
private final int[] array;
public BagOfPrimitivesAndArray(int value1, String value2, int value3) {
this.value1 = value1;
this.value2 = value2;
this.value3 = value3;
this.array = new int[] {12, 20};
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
BagOfPrimitivesAndArray that = (BagOfPrimitivesAndArray) o;
return value1 == that.value1 &&
value3 == that.value3 &&
Objects.equals(value2, that.value2) &&
Arrays.equals(array, that.array);
}
@Override
public int hashCode() {
int result = Objects.hash(value1, value2, value3);
result = 31 * result + Arrays.hashCode(array);
return result;
}
@Override
public String toString() {
return "BagOfPrimitivesAndArray{" +
"value1=" + value1 +
", value2='" + value2 + '\'' +
", value3=" + value3 +
", array=" + Arrays.toString(array) +
'}';
}
}
| 28.659574
| 75
| 0.56199
|
3fa79b87f929dbee1989803e9aa011d7ac9236d5
| 944
|
package com.spring.cloud.gateway;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.client.SpringCloudApplication;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.cloud.gateway.route.RouteLocator;
import org.springframework.cloud.gateway.route.builder.RouteLocatorBuilder;
import org.springframework.context.annotation.Bean;
/**
 * Spring Cloud Gateway entry point with service discovery enabled.
 */
@SpringBootApplication
@EnableDiscoveryClient
public class SpringGatewayApplication {
    /** Boots the gateway application. */
    public static void main(String[] args) {
        SpringApplication.run(SpringGatewayApplication.class, args);
    }
    /**
     * Programmatic route table: forwards any request matching /test/** to
     * the "alibaba-nacos-discovery-client" service via load-balanced
     * ("lb://") discovery lookup. Route id is "article".
     */
    @Bean
    public RouteLocator customRouteLocator(RouteLocatorBuilder builder) {
        return builder.routes()
            .route("article", r -> r.path("/test/**").uri("lb://alibaba-nacos-discovery-client"))
            .build();
    }
}
| 33.714286
| 101
| 0.769068
|
97852d9b1f41c6f18642b856c197159af743c003
| 2,041
|
//import org.junit.jupiter.api.Test;
//import org.springframework.boot.test.context.SpringBootTest;
//
//@SpringBootTest
//class ElectricPowerApplicationTests {
//
// @Test
// void contextLoads() {
// }
//
// public static final int FLAG = -32523523;
//
// /**
// * byte数组转hex
// * @param bytes
// * @return
// */
// public static String byteToHex(byte[] bytes){
// String strHex = "";
// StringBuilder sb = new StringBuilder("");
// for (int n = 0; n < bytes.length; n++) {
// strHex = Integer.toHexString(bytes[n] & 0xFF);
// sb.append((strHex.length() == 1) ? "0" + strHex : strHex); // 每个字节由两个字符表示,位数不够,高位补0
// }
// return sb.toString().trim();
// }
//
//
// /**
// * hex转byte数组
// * @param hex
// * @return
// */
// public static byte[] hexToByte(String hex){
// int m = 0, n = 0;
// int byteLen = hex.length() / 2; // 每两个字符描述一个字节
// byte[] ret = new byte[byteLen];
// for (int i = 0; i < byteLen; i++) {
// m = i * 2 + 1;
// n = m + 1;
// System.out.println("0x" + hex.substring(i * 2, m) + hex.substring(m, n));
// int intVal = Integer.decode("0x" + hex.substring(i * 2, m) + hex.substring(m, n));
// ret[i] = Byte.valueOf((byte)intVal);
// }
// return ret;
// }
//
//
//
// //fe0fbafd
// //0xfe
// //0x0f
// //0xba
// //0xfd
// public static void main(String[] args) {
// //integer -> hex str -> byte arrs
// String s = Integer.toHexString(FLAG);
// System.out.println(s);
// System.out.println(hexToByte(s).toString());
// System.out.println("===========================");
// // byte arrs -> hex str -> integer
// byte[] bytes=new byte[]{(byte) 0xfe,(byte)0x0f, (byte) 0xba, (byte) 0xfd};
// String s1 = byteToHex(bytes);
// System.out.println(s1);
// System.out.println(Integer.parseInt("FE0FBAFD", 10));
//
//
// }
//
//}
| 28.746479
| 97
| 0.494855
|
afe7208c87a001146a87c7f1e93df00a3467e1dd
| 3,256
|
package com.u8.sdk.plugin;
import com.u8.sdk.IAnalytics;
import com.u8.sdk.base.PluginFactory;
/**
 * Facade over the channel's analytics plugin (plugin type 5, loaded via
 * PluginFactory). Every tracking call degrades to a silent no-op when no
 * analytics plugin is configured, so callers never need null checks.
 */
public class U8Analytics {

    private static U8Analytics sInstance;

    private IAnalytics plugin;

    /** Lazily creates and returns the singleton instance. */
    public static U8Analytics getInstance() {
        if (sInstance == null) {
            sInstance = new U8Analytics();
        }
        return sInstance;
    }

    /** Loads the analytics plugin (type 5) from the plugin factory. */
    public void init() {
        this.plugin = (IAnalytics) PluginFactory.getInstance().initPlugin(5);
    }

    /** True when a plugin is loaded and it supports the named method. */
    public boolean isSupport(String method) {
        return plugin != null && plugin.isSupportMethod(method);
    }

    public void startLevel(String level) {
        if (plugin != null) {
            plugin.startLevel(level);
        }
    }

    public void failLevel(String level) {
        if (plugin != null) {
            plugin.failLevel(level);
        }
    }

    public void finishLevel(String level) {
        if (plugin != null) {
            plugin.finishLevel(level);
        }
    }

    public void startTask(String task, String type) {
        if (plugin != null) {
            plugin.startTask(task, type);
        }
    }

    public void failTask(String task) {
        if (plugin != null) {
            plugin.failTask(task);
        }
    }

    public void finishTask(String task) {
        if (plugin != null) {
            plugin.finishTask(task);
        }
    }

    public void payRequest(String orderID, String productID, double money, String currency) {
        if (plugin != null) {
            plugin.payRequest(orderID, productID, money, currency);
        }
    }

    public void pay(String orderID, double money, int num) {
        if (plugin != null) {
            plugin.pay(orderID, money, num);
        }
    }

    public void buy(String item, int num, double price) {
        if (plugin != null) {
            plugin.buy(item, num, price);
        }
    }

    public void use(String item, int num, double price) {
        if (plugin != null) {
            plugin.use(item, num, price);
        }
    }

    public void bonus(String item, int num, double price, int trigger) {
        if (plugin != null) {
            plugin.bonus(item, num, price, trigger);
        }
    }

    public void login(String userID) {
        if (plugin != null) {
            plugin.login(userID);
        }
    }

    public void logout() {
        if (plugin != null) {
            plugin.logout();
        }
    }

    public void levelup(int level) {
        if (plugin != null) {
            plugin.levelup(level);
        }
    }
}
| 21.852349
| 93
| 0.575553
|
5ea7ade064aa1471e0965b25615d2ee188ab3d75
| 470
|
package org.apache.poi.hssf.eventusermodel.dummyrecord;
import org.apache.poi.hssf.eventusermodel.dummyrecord.DummyRecordBase;
public final class MissingCellDummyRecord extends DummyRecordBase {
private int row;
private int column;
public MissingCellDummyRecord(int row, int column) {
this.row = row;
this.column = column;
}
public int getRow() {
return this.row;
}
public int getColumn() {
return this.column;
}
}
| 19.583333
| 70
| 0.702128
|
19399708e7825d89ce60e9064ed1b6e5f0ff5ef8
| 10,072
|
package com.teamclay.MagnticTech;
import ibxm.Player;
import java.util.ArrayList;
import java.util.List;
import com.teamclay.MagnticTech.Block.BlockBoron;
import com.teamclay.MagnticTech.Block.BlockMagnetite_N;
import com.teamclay.MagnticTech.Block.BlockMagnetite_S;
import com.teamclay.MagnticTech.Block.BlockNeodymium;
import com.teamclay.MagnticTech.Block.BlockSeptumMagnet;
import com.teamclay.MagnticTech.Item.ItemBoronDust;
import com.teamclay.MagnticTech.Item.ItemMagnet_N;
import com.teamclay.MagnticTech.Item.ItemMagnet_S;
import com.teamclay.MagnticTech.Item.ItemNeodymium;
import com.teamclay.MagnticTech.Item.ItemSeptumMagnetAxe;
import com.teamclay.MagnticTech.Item.ItemSeptumMagnetHoe;
import com.teamclay.MagnticTech.Item.ItemSeptumMagnetIronIngot;
import com.teamclay.MagnticTech.Item.ItemSeptumMagnetPickaxe;
import com.teamclay.MagnticTech.Item.ItemSeptumMagnetShovel;
import com.teamclay.MagnticTech.Item.ItemSeptumMagnetSword;
import com.teamclay.MagnticTech.Item.ItemWrench;
import com.teamclay.MagnticTech.Machine.MagneticFurnace;
//import com.teamclay.MagnticTech.TileEntity.TileEntityMagneticFurnace;
import com.teamclay.MagnticTech.TileEntity.TileEntityMagneticFurnace;
import com.teamclay.MagnticTech.gui.GuiHandler;
import net.minecraft.block.Block;
import net.minecraft.client.Minecraft;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.item.Item;
import net.minecraft.item.ItemAxe;
import net.minecraft.item.ItemHoe;
import net.minecraft.item.ItemPickaxe;
import net.minecraft.item.ItemSpade;
import net.minecraft.item.ItemStack;
import net.minecraft.item.ItemSword;
import net.minecraft.item.Item.ToolMaterial;
import net.minecraft.tileentity.TileEntityFurnace;
import net.minecraft.util.ChatComponentText;
import net.minecraftforge.client.event.RenderGameOverlayEvent;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.common.util.EnumHelper;
import net.minecraftforge.common.util.ForgeDirection;
import net.minecraftforge.event.entity.player.PlayerInteractEvent;
import net.minecraftforge.event.entity.player.PlayerInteractEvent.Action;
import net.minecraftforge.event.entity.player.PlayerUseItemEvent;
import net.minecraftforge.oredict.OreDictionary;
import cpw.mods.fml.common.FMLCommonHandler;
import cpw.mods.fml.common.Mod;
import cpw.mods.fml.common.Mod.EventHandler;
import cpw.mods.fml.common.event.FMLInitializationEvent;
import cpw.mods.fml.common.event.FMLPostInitializationEvent;
import cpw.mods.fml.common.event.FMLPreInitializationEvent;
import cpw.mods.fml.common.eventhandler.SubscribeEvent;
import cpw.mods.fml.common.network.NetworkRegistry;
import cpw.mods.fml.common.registry.GameRegistry;
/**
 * Main mod class for Magnetic-Tech: registers blocks, items, tools,
 * recipes, the creative tab, the ore world generator and event handlers
 * during the FML mod lifecycle.
 */
@Mod(modid="MagneticTech", name="Magnetic-Tech", version="1.01.05")
public class MagneticTech {
    public static MagneticTech instance;
    // Blocks
    public static Block Magnetite_N;
    public static Block Magnetite_S;
    public static Block Septum_Magnet;
    public static Block Boron_Ore;
    public static Block Neodymium_ore;
    // Items
    public static Item Magnet_S;
    public static Item Magnet_N;
    public static Item Septum_Magnet_Iron_Ingot;
    public static Item Boron_dust;
    public static Item Neodymium_dust;
    public static Item Wrench;
    // Tools
    public static ItemSword SeptumMagnet_Sword;
    public static ItemPickaxe SeptumMagnet_Pickaxe;
    public static ItemSpade SeptumMagnet_Shovel;
    public static ItemAxe SeptumMagnet_Axe;
    public static ItemHoe SeptumMagnet_Hoe;
    // Creative tab; created in RegisterCreativeTabs() during pre-init.
    public static CreativeTabs MT_BASE = null;
    public static Block Machine_MagneticFurnace;
    // Unlocalized names of machine blocks; consulted by onUse() for wrenching.
    public static List<String> machines = new ArrayList<String>();
    /** Pre-init: create the creative tab, items and blocks, and hook up the GUI handler. */
    @EventHandler
    public void preLoad(FMLPreInitializationEvent event)
    {
        RegisterCreativeTabs();
        RegisterItem();
        RegisterBlock();
        NetworkRegistry.INSTANCE.registerGuiHandler(this, new GuiHandler());
    }
    /** Init: register recipes, the ore world generator, and the event listeners. */
    @EventHandler
    public void load(FMLInitializationEvent event)
    {
        RegisterRepice();
        GameRegistry.registerWorldGenerator(new Ore(), 5);
        FMLCommonHandler.instance().bus().register(this);
        MinecraftForge.EVENT_BUS.register(this);
    }
    /** Post-init: currently nothing to do. */
    @EventHandler
    public void postLoad(FMLPostInitializationEvent event)
    {
    }
    /**
     * Registers ore-dictionary entries.
     * NOTE(review): not called from any lifecycle method in this class --
     * confirm whether that is intentional.
     */
    public void RegisterOreDic(){
        OreDictionary.registerOre("oreMagnetite", this.Magnet_N);
        OreDictionary.registerOre("oreMagnetite", this.Magnet_S);
        OreDictionary.registerOre("oreSeptumMagnet", this.Septum_Magnet);
        OreDictionary.registerOre("oreBoron", this.Boron_Ore);
    }
    /**
     * Registers the magnetic furnace tile entity.
     * NOTE(review): also not invoked from preLoad/load/postLoad here -- verify.
     */
    public void RegisterTileEntity(){
        GameRegistry.registerTileEntity(TileEntityMagneticFurnace.class, "MagneticFurnace");
    }
    /** Creates and registers all blocks, and records machine blocks for onUse(). */
    public void RegisterBlock(){
        Magnetite_N = new BlockMagnetite_N();
        Magnetite_S = new BlockMagnetite_S();
        Septum_Magnet = new BlockSeptumMagnet();
        Boron_Ore = new BlockBoron();
        Neodymium_ore = new BlockNeodymium();
        Machine_MagneticFurnace = new MagneticFurnace();
        GameRegistry.registerBlock(Magnetite_N, "magnetiten");
        GameRegistry.registerBlock(Magnetite_S, "magnetites");
        GameRegistry.registerBlock(Septum_Magnet, "septummagnet");
        GameRegistry.registerBlock(Boron_Ore, "boron_ore");
        GameRegistry.registerBlock(Neodymium_ore, "neodymium_ore");
        GameRegistry.registerBlock(Machine_MagneticFurnace, "magnetic_furnace");
        machines.add(this.Machine_MagneticFurnace.getUnlocalizedName());
    }
    /** Creates and registers all items and tools. */
    public void RegisterItem(){
        Magnet_S = new ItemMagnet_S();
        Magnet_N = new ItemMagnet_N();
        Boron_dust = new ItemBoronDust();
        Neodymium_dust = new ItemNeodymium();
        GameRegistry.registerItem(Magnet_S, "magnets");
        GameRegistry.registerItem(Magnet_N, "magnetn");
        GameRegistry.registerItem(Boron_dust, "boron_dust");
        GameRegistry.registerItem(Neodymium_dust, "neodymium_dust");
        // The ingot itself is registered in RegisterCreativeTabs(); here it is
        // only assigned to the mod's creative tab.
        this.Septum_Magnet_Iron_Ingot.setCreativeTab(this.MT_BASE);
        SeptumMagnet_Sword = new ItemSeptumMagnetSword();
        GameRegistry.registerItem(SeptumMagnet_Sword, "septum_magnet_sword");
        SeptumMagnet_Pickaxe = new ItemSeptumMagnetPickaxe();
        GameRegistry.registerItem(SeptumMagnet_Pickaxe, "septum_magnet_pickaxe");
        SeptumMagnet_Shovel = new ItemSeptumMagnetShovel();
        GameRegistry.registerItem(SeptumMagnet_Shovel, "septum_magnet_shovel");
        SeptumMagnet_Axe = new ItemSeptumMagnetAxe();
        GameRegistry.registerItem(SeptumMagnet_Axe, "septum_magnet_axe");
        SeptumMagnet_Hoe = new ItemSeptumMagnetHoe();
        GameRegistry.registerItem(SeptumMagnet_Hoe, "septum_magnet_hoe");
        Wrench = new ItemWrench();
        GameRegistry.registerItem(Wrench, "wrench");
    }
    /** Registers smelting recipes and crafting recipes (tools, wrench, furnace). */
    public void RegisterRepice(){
        GameRegistry.addSmelting(Magnetite_N, new ItemStack(Magnet_N), 1F);
        GameRegistry.addSmelting(Magnetite_S, new ItemStack(Magnet_S), 1F);
        GameRegistry.addSmelting(Boron_Ore, new ItemStack(Boron_dust,2), 5F);
        GameRegistry.addSmelting(Neodymium_ore, new ItemStack(Neodymium_dust,2), 3F);
        GameRegistry.addSmelting(Septum_Magnet, new ItemStack(Septum_Magnet_Iron_Ingot), 5F);
        GameRegistry.addRecipe(new ItemStack(this.SeptumMagnet_Sword), new Object[]{" # "," # "," * ",'#',this.Septum_Magnet_Iron_Ingot,'*',Items.stick
        });
        GameRegistry.addRecipe(new ItemStack(this.SeptumMagnet_Pickaxe), new Object[]{"###"," * "," * ",'#',this.Septum_Magnet_Iron_Ingot,'*',Items.stick
        });
        GameRegistry.addRecipe(new ItemStack(this.Wrench), new Object[]{"# #"," # "," * ",'#',this.Septum_Magnet_Iron_Ingot,'*',Items.stick
        });
        // Hoe and axe each get two mirrored recipe variants.
        GameRegistry.addRecipe(new ItemStack(this.SeptumMagnet_Hoe), new Object[]{"## "," * "," * ",'#',this.Septum_Magnet_Iron_Ingot,'*',Items.stick
        });
        GameRegistry.addRecipe(new ItemStack(this.SeptumMagnet_Hoe), new Object[]{" ##"," * "," * ",'#',this.Septum_Magnet_Iron_Ingot,'*',Items.stick
        });
        GameRegistry.addRecipe(new ItemStack(this.SeptumMagnet_Axe), new Object[]{"## ","#* "," * ",'#',this.Septum_Magnet_Iron_Ingot,'*',Items.stick
        });
        GameRegistry.addRecipe(new ItemStack(this.SeptumMagnet_Axe), new Object[]{"## "," *#"," * ",'#',this.Septum_Magnet_Iron_Ingot,'*',Items.stick
        });
        GameRegistry.addRecipe(new ItemStack(this.SeptumMagnet_Shovel), new Object[]{" # "," * "," * ",'#',this.Septum_Magnet_Iron_Ingot,'*',Items.stick
        });
        // Furnace: ingot frame with N and S magnets on opposite sides (both orientations).
        GameRegistry.addRecipe(new ItemStack(this.Machine_MagneticFurnace), new Object[] {
        "#n#","# #","#s#",'#',this.Septum_Magnet_Iron_Ingot,'n',this.Magnet_N,'s',this.Magnet_S
        });
        GameRegistry.addRecipe(new ItemStack(this.Machine_MagneticFurnace), new Object[] {
        "#s#","# #","#n#",'#',this.Septum_Magnet_Iron_Ingot,'n',this.Magnet_N,'s',this.Magnet_S
        });
    }
    /** Registers the ingot item and creates the creative tab that uses it as its icon. */
    public void RegisterCreativeTabs(){
        Septum_Magnet_Iron_Ingot = new ItemSeptumMagnetIronIngot();
        GameRegistry.registerItem(Septum_Magnet_Iron_Ingot, "septummagnetironingot");
        MT_BASE = new CreativeTabs("MT-BASE"){
            @Override
            public Item getTabIconItem() {
                return Septum_Magnet_Iron_Ingot;
            }};
    }
    /**
     * Right-click handler: when a player sneak-right-clicks a registered
     * machine block while holding the wrench, the block is removed from the
     * world and an item form is placed in the player's inventory. Note the
     * second "if" has no braces, so only the sneaking branch is guarded by
     * the machines-list check.
     */
    @SubscribeEvent
    public void onUse(PlayerInteractEvent event){
        if(event.action == Action.RIGHT_CLICK_BLOCK){
            if(machines.contains(event.world.getBlock(event.x, event.y, event.z).getUnlocalizedName()))
            if(event.entityPlayer.isSneaking() == true){
                if(event.entityPlayer.inventory.getCurrentItem() != null){
                    if(event.entityPlayer.inventory.getCurrentItem().getUnlocalizedName().equals(this.Wrench.getUnlocalizedName())){
                        Block block = event.world.getBlock(event.x, event.y, event.z);
                        event.world.setBlock(event.x, event.y, event.z, Blocks.air);
                        event.entityPlayer.inventory.addItemStackToInventory(new ItemStack(block));
                        event.entityPlayer.swingItem();
                    }else{
                        Block block = event.world.getBlock(event.x, event.y, event.z);
                        event.entityPlayer.swingItem();
                    }
                }
                // }else{
                // Block block = event.world.getBlock(event.x, event.y, event.z);
                // if(block.getUnlocalizedName().equals("magnetictech:magnetic_furnace")){
                // Minecraft.getMinecraft().displayGuiScreen(new MagneticFurnaceGui(event.entityPlayer.inventory,((TileEntityFurnace)event.world.getTileEntity(event.x,event.y , event.z))));
                // }
            }
        }
    }
    /** Prepends the current mouse coordinates to the debug overlay text. */
    @SubscribeEvent
    public void playerName(RenderGameOverlayEvent.Text event) {
        event.left.add(0, String.format("x:" + event.mouseX + ",y:" + event.mouseY));
    }
}
| 44.370044
| 175
| 0.78336
|
c50744e313c2bf545b5d2904ab3ca4e884ad2c56
| 7,514
|
/*
* Copyright 2007 - 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.sf.jailer.util;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.util.ArrayList;
import java.util.List;
/**
 * Parser for CSV-files.
 *
 * @author Ralf Wisser
 */
public class CsvFile {

    /**
     * A line in a CSV-file.
     */
    public static class Line {

        /**
         * Describes the position of the line in a file.
         */
        public final String location;

        /**
         * The cells.
         */
        public final List<String> cells;

        /**
         * Length of the line: index (1-based) of the last non-blank cell.
         */
        public int length;

        /**
         * Constructor.
         *
         * @param location describes the position of the line in a file
         * @param cells the cells
         */
        public Line(String location, List<String> cells) {
            this.location = location;
            this.cells = cells;
            int num = 0;
            int l = 0;
            for (String s : cells) {
                ++num;
                if (s != null && s.trim().length() > 0) {
                    l = num;
                }
            }
            this.length = l;
        }

        /**
         * Line as String: cells up to the last non-blank one, each encoded
         * and terminated by "; ".
         */
        @Override
        public String toString() {
            int num = 0;
            int l = 0;
            for (String s : cells) {
                ++num;
                if (s != null && s.trim().length() > 0) {
                    l = num;
                }
            }
            // StringBuilder instead of StringBuffer: no synchronization needed.
            StringBuilder sb = new StringBuilder();
            if (l >= cells.size()) {
                l = cells.size() - 1;
            }
            for (int i = 0; i <= l; ++i) {
                sb.append(encodeCell(cells.get(i)) + "; ");
            }
            return sb.toString();
        }
    };

    /**
     * Filter deciding which lines of a CSV file are kept.
     */
    public static interface LineFilter {
        boolean accept(Line line);
    }

    /**
     * List of lines.
     */
    private List<Line> rows = new ArrayList<Line>();

    /**
     * Indicates start of block inside a CSV file.
     */
    public static String BLOCK_INDICATOR = "#! block ";

    /**
     * Constructor.
     *
     * @param csvFile the csv file
     */
    public CsvFile(File csvFile) throws Exception {
        this(csvFile, null, null);
    }

    /**
     * Constructor.
     *
     * @param csvFile the csv file
     * @param filter filter for lines; rejected lines are skipped
     */
    public CsvFile(File csvFile, LineFilter filter) throws Exception {
        this(csvFile, null, filter);
    }

    /**
     * Constructor.
     *
     * @param csvFile the csv file
     * @param block the block to read, <code>null</code> to read default block
     */
    public CsvFile(File csvFile, String block) throws Exception {
        this(csvFile, block, null);
    }

    /**
     * Constructor. Reads the file; lines starting with "#" are comments,
     * "#include " pulls in a sibling file, and {@link #BLOCK_INDICATOR}
     * switches between named blocks.
     *
     * @param csvFile the csv file
     * @param block the block to read, <code>null</code> to read default block
     * @param filter filter for lines; rejected lines are skipped
     */
    public CsvFile(File csvFile, String block, LineFilter filter) throws Exception {
        // try-with-resources: the original leaked the reader when an
        // exception was thrown during parsing.
        try (BufferedReader reader = new BufferedReader(new FileReader(csvFile))) {
            String line = null;
            int lineNr = 0;
            boolean inBlock = block == null;
            while ((line = reader.readLine()) != null) {
                ++lineNr;
                if (line.trim().length() == 0) {
                    continue;
                }
                if (line.trim().startsWith(BLOCK_INDICATOR)) {
                    // Leaving the requested block ends parsing.
                    if (inBlock) {
                        break;
                    }
                    String blockName = line.trim().substring(BLOCK_INDICATOR.length()).trim();
                    inBlock = block.equals(blockName);
                    continue;
                }
                if (line.trim().startsWith("#include ")) {
                    // Inline the included file's (default-block) lines.
                    String includeFile = line.trim().substring(9).trim();
                    rows.addAll(new CsvFile(new File(csvFile.getParent() + File.separator + includeFile)).rows);
                    continue;
                }
                if (line.trim().startsWith("#")) {
                    continue;
                }
                if (!inBlock) {
                    continue;
                }
                List<String> row = new ArrayList<String>();
                String[] col = decodeLine(line);
                for (int i = 0; i < col.length; ++i) {
                    String s = col[i];
                    row.add(s.trim());
                }
                // Pad to a fixed width so consumers can index cells freely.
                while (row.size() < 100) {
                    row.add("");
                }
                Line cvsLine = new Line("line " + lineNr + ", file " + csvFile.getName(), row);
                if (filter == null || filter.accept(cvsLine)) {
                    rows.add(cvsLine);
                }
            }
        }
    }

    /**
     * Decodes and splits csv-line. Cells are separated by ';'; a backslash
     * escapes the next character, with "\n" and "\r" decoding to newline
     * and carriage return.
     *
     * @param line the line to decode
     * @return decoded and splitted line
     */
    public static String[] decodeLine(String line) {
        List<String> cells = new ArrayList<String>();
        StringBuilder sb = new StringBuilder();
        boolean esc = false;
        for (int i = 0; i < line.length(); ++i) {
            char c = line.charAt(i);
            if (c == '\\') {
                if (esc) {
                    esc = false;
                } else {
                    esc = true;
                    continue;
                }
            }
            if (!esc && c == ';') {
                cells.add(sb.toString());
                sb.setLength(0);
            } else {
                if (esc && c == 'n') {
                    c = '\n';
                } else if (esc && c == 'r') {
                    c = '\r';
                }
                sb.append(c);
            }
            esc = false;
        }
        cells.add(sb.toString());
        return cells.toArray(new String[cells.size()]);
    }

    /**
     * Encodes a csv-cell: escapes ';' and '\' and replaces newline and
     * carriage return by "\n" and "\r".
     *
     * @param cell the cell to encode
     * @return encoded cell
     */
    public static String encodeCell(String cell) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < cell.length(); ++i) {
            char c = cell.charAt(i);
            if (c == ';') {
                sb.append("\\;");
            } else if (c == '\\') {
                sb.append("\\\\");
            } else if (c == '\n') {
                sb.append("\\n");
            } else if (c == '\r') {
                sb.append("\\r");
            } else {
                sb.append(c);
            }
        }
        return sb.toString();
    }

    /**
     * Gets the list of lines.
     *
     * @return list of lists of cell-contents
     */
    public List<Line> getLines() {
        return rows;
    }

    /**
     * Checks if a certain line can be found in this file. <code>null</code>
     * cells in the given line match anything.
     *
     * @param line the line
     * @return <code>true</code> iff this file contains the line
     */
    public boolean contains(String[] line) {
        for (Line l: getLines()) {
            boolean differenceFound = false;
            int i = 0;
            for (String cell: line) {
                if (cell != null && !cell.equals(l.cells.get(i))) {
                    differenceFound = true;
                    break;
                }
                ++i;
            }
            if (!differenceFound) {
                return true;
            }
        }
        return false;
    }
}
| 26.457746
| 108
| 0.481634
|
776651e086d55367b268ed2625a66f81a15fb0ca
| 1,673
|
package ru.mail.polis.lizasold;
import org.jetbrains.annotations.NotNull;
import java.io.*;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.NoSuchElementException;
/**
 * File-based DAO: each id is stored as a file under {@code dir}; deletion is
 * recorded by an empty marker file in the {@code deletedDir} subdirectory.
 */
public class MyFileDAO implements MyDAO {

    @NotNull
    private final String dir;

    @NotNull
    private final String deletedDir;

    public MyFileDAO(@NotNull String dir) {
        this.dir = dir;
        this.deletedDir = dir + "/deletedDir";
        createDeletedDir();
    }

    /**
     * Reads the value stored under {@code id}.
     *
     * @throws NoSuchElementException if the id was deleted or never stored
     * @throws IOException on read failure
     */
    @NotNull
    @Override
    public byte[] get(@NotNull String id) throws NoSuchElementException, IOException {
        if (isDeleted(id)) {
            throw new NoSuchElementException("deleted");
        } else if (!isExist(id)) {
            throw new NoSuchElementException("no file with id " + id);
        }
        return Files.readAllBytes(Paths.get(dir, id));
    }

    /**
     * Stores {@code value} under {@code id}, creating or overwriting the file.
     */
    @Override
    public void upsert(@NotNull String id, @NotNull byte[] value) throws IOException {
        Files.write(Paths.get(dir, id), value);
        // Writing a value resurrects a previously deleted id: drop the marker.
        if (isDeleted(id)) {
            Files.delete(Paths.get(deletedDir, id));
        }
    }

    /**
     * Marks {@code id} as deleted. Idempotent: BUGFIX - the unguarded
     * Files.createFile threw FileAlreadyExistsException when the same id
     * was deleted twice.
     */
    @Override
    public void delete(@NotNull String id) throws IOException {
        if (!isDeleted(id)) {
            Files.createFile(Paths.get(deletedDir, id));
        }
    }

    /** Creates the deleted-marker directory; already-exists is not an error. */
    public void createDeletedDir() {
        try {
            Files.createDirectory(Paths.get(deletedDir));
        } catch (IOException e) {
            //ignore
        }
    }

    /** Returns whether a value file exists for {@code id}. */
    public boolean isExist(@NotNull final String id) throws IOException {
        return Files.exists(Paths.get(dir, id));
    }

    /** Returns whether a deletion marker exists for {@code id}. */
    public boolean isDeleted(@NotNull final String id) throws IOException {
        return Files.exists(Paths.get(deletedDir, id));
    }
}
| 26.140625
| 86
| 0.640167
|
472b0b811051a524c41f34c385ae9cb1bcd162d3
| 1,396
|
package com.company;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Scanner;
public class Five {

    /**
     * Reads N commands of the form "register {name} {plate}" or
     * "unregister {name}", maintains a name -> plate map in insertion order,
     * and finally prints all remaining registrations.
     */
    public static void main(String[] args) {
        Scanner scanner = new Scanner(System.in);
        int repeats = Integer.parseInt(scanner.nextLine());
        Map<String, String> registrations = new LinkedHashMap<>();
        for (int i = 0; i < repeats; i++) {
            String[] input = scanner.nextLine().split(" ");
            String command = input[0];
            String name = input[1];
            if (command.equals("register")) {
                String carNumber = input[2];
                if (registrations.containsKey(name)) {
                    // BUGFIX: report the plate the user is ALREADY registered
                    // with; the original printed the newly supplied plate.
                    System.out.printf("ERROR: already registered with plate number %s%n", registrations.get(name));
                } else {
                    registrations.put(name, carNumber);
                    System.out.printf("%s registered %s successfully%n", name, carNumber);
                }
            } else if (command.equals("unregister")) {
                if (registrations.containsKey(name)) {
                    registrations.remove(name);
                    System.out.printf("%s unregistered successfully%n", name);
                } else {
                    System.out.printf("ERROR: user %s not found%n", name);
                }
            }
        }
        registrations.forEach((name, plate) -> System.out.printf("%s => %s%n", name, plate));
    }
}
| 31.727273
| 103
| 0.483524
|
15f0a6c30382c31c682bd8216ac66c0c2dde88e9
| 6,986
|
package net.innectis.innplugin.player;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
/**
 *
 * @author Hret
 *
 * Per-player toggleable settings, each backed by a single bit of a flag word.
 * <p/>
 * Note: the default value (if that is enabled or disabled) is <b><u>ALWAYS</u><b>
 * the situation where the flag is <b><u>NOT</u><b> set. <br/>
 * This can be check with the ::isDefaultOn() value
 */
public enum PlayerSettings {
    //#FORMAT_START
    SADDLE (1 , PlayerGroup.GUEST , "Saddle" , false, "Sets if players are allowed to sit on you."),
    HUNGER (2 , PlayerGroup.GOLDY , "Hunger" , true , "Sets if hunger should be on or off."),
    DEATHMESSAGE (3 , PlayerGroup.GUEST , "Death Message" , true , "Sets if death messages should be shown."),
    SHOPNOTIFICATION (4 , PlayerGroup.GUEST , "Shop Message" , true , "Sets if shop notifications should be shown."),
    //
    VALUTA_MESSAGE (5 , PlayerGroup.GUEST , "Valuta Messages" , true , "Toggles vT gained on ore break message."),
    LOS_MESSAGE (6 , PlayerGroup.GUEST , "Line of Sight Message" , true , "Toggles the line of sight error message."),
    BONUS_MESSAGE (7 , PlayerGroup.VIP , "Bonus Message" , true , "Sets if messages are sent when using bonuses."),
    HEAR_MUTED (8 , PlayerGroup.MODERATOR , "Hear Muted Players" , true , "Sets if player can hear muted players."),
    //
    ITEM_PICKUP (9 , PlayerGroup.ADMIN , "Item Pickup" , true , "Sets if player can pickup items."),
    TWE_INVUSEAGE (10 , PlayerGroup.ADMIN , "TinyWE Inventory" , true , "Sets if the player should use the inventory with TinyWE."),
    INVERT_LOCALCHAT (11 , PlayerGroup.GUEST , "Invert Local Chat" , false, "When set the '@' will be used for global chat instead of local."),
    EMPTY (12 , PlayerGroup.NONE , "Empty" , false, "This is a placeholder."),
    //
    FLIGHT (13 , PlayerGroup.MODERATOR , "Flight" , false, "Allows the player flight abilities."),
    TIPS (14 , PlayerGroup.GUEST , "Tips" , true , "Allows the viewing of periodic tip messages."),
    TWWAND (15 , PlayerGroup.VIP , "TinyWE Wand" , true , "Allows the use of the TinyWE axe as a wand."),
    ALLOW_TP (16 , PlayerGroup.GUEST , "Allow Teleport" , true , "Allows other players to teleport to you."),
    //
    PVP (17, PlayerGroup.GUEST , "PvP" , false, "Allows the player to engage in player-vs-player combat."),
    EMPTY_2 (18, PlayerGroup.NONE , "Empty" , false, "This is a placeholder."),
    CHAT_FILTER (19, PlayerGroup.GUEST , "Chat Filter" , true , "Toggles the use of the chat filter."),
    INSTANT_TP (20, PlayerGroup.GUEST , "Instant Teleport" , false, "Toggles whether teleports from others are instant."),
    GOD (21, PlayerGroup.ADMIN , "God" , false, "Toggles immunity from all sources.");
    //#FORMAT_END
    //
    private final long flagBit;
    private final long id;
    private final PlayerGroup group;
    private final String flagName;
    private final boolean defaultOn;
    private final String description;

    private PlayerSettings(long flagBit, PlayerGroup requiredGroup, String name, boolean defaultOn, String description) {
        if (flagBit <= 0 || flagBit > 64) {
            throw new RuntimeException("Bit id must be between 1 and 64 (included).");
        }
        this.id = flagBit;
        // Bit position is 1-based, so bit N maps to 2^(N-1).
        this.flagBit = (long) Math.pow(2L, flagBit - 1);
        this.group = requiredGroup;
        this.flagName = name;
        this.defaultOn = defaultOn;
        this.description = description;
    }

    /**
     * This checks what the default state is.
     * When true, the flag needs to be turned ON to be enabled.
     */
    public final boolean isDefaultOn() {
        return this.defaultOn;
    }

    /**
     * Gets the ID of this setting
     * @return
     */
    public long getId() {
        return this.id;
    }

    /**
     * The BIT that this setting uses
     */
    public final long getSettingBit() {
        return this.flagBit;
    }

    /**
     * Returns the name of this setting
     */
    public final String getName() {
        return flagName;
    }

    /**
     * The group that is required to set this setting
     */
    public final PlayerGroup getRequiredGroup() {
        return group;
    }

    /**
     * Checks if this is a staff setting
     * @return
     */
    public boolean isStaffSetting() {
        return getRequiredGroup().equalsOrInherits(PlayerGroup.MODERATOR);
    }

    /**
     * An description of the setting
     */
    public String getDescription() {
        return this.description;
    }

    /**
     * Gets a setting by its ID
     * @param id
     * @return
     */
    public static PlayerSettings getSetting(int id) {
        for (PlayerSettings candidate : values()) {
            // Placeholder entries (group NONE) are never resolvable.
            if (candidate.getRequiredGroup() != PlayerGroup.NONE && candidate.getId() == id) {
                return candidate;
            }
        }
        return null;
    }

    /**
     * Looks for the setting flag with the given name
     * @param name
     * @return
     */
    public static PlayerSettings getSetting(String name) {
        for (PlayerSettings candidate : values()) {
            // Placeholder entries (group NONE) are never resolvable.
            if (candidate.getRequiredGroup() != PlayerGroup.NONE
                    && candidate.getName().equalsIgnoreCase(name)) {
                return candidate;
            }
        }
        return null;
    }

    /**
     * Gets all settings (placeholders excluded) sorted by name
     * @return
     */
    public static List<PlayerSettings> getSortedSettings() {
        List<PlayerSettings> sorted = new ArrayList<PlayerSettings>();
        for (PlayerSettings candidate : PlayerSettings.values()) {
            if (candidate.getRequiredGroup() != PlayerGroup.NONE) {
                sorted.add(candidate);
            }
        }
        Collections.sort(sorted, new Comparator<PlayerSettings>() {
            @Override
            public int compare(PlayerSettings left, PlayerSettings right) {
                return left.getName().compareTo(right.getName());
            }
        });
        return sorted;
    }
}
| 37.358289
| 166
| 0.545233
|
71ac7475a24cf76f0380bd938dd82a33ff751746
| 1,892
|
package com.bham.bd.view.model;
import com.bham.bd.view.MenuSession;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.layout.*;
import javafx.scene.shape.Rectangle;
/**
 * <h1>Background for the Main Menu</h1>
 *
 * <p>By default none of the menus have backgrounds, only a darker dim over the
 * scene. This class is the exception: it is attached as a separate JavaFX node
 * serving as the Main Menu's animated background.</p>
 */
public class MenuBackground extends AnchorPane {

    /** Resource path of the animated background image. */
    private static final String BG_PATH = "img/menu/background.gif";

    /**
     * Builds the background node sized to the menu session and starts the
     * background and particle animations.
     */
    public MenuBackground() {
        setWidth(MenuSession.WIDTH);
        setHeight(MenuSession.HEIGHT);
        initBackgroundAnimation();
        initParticleAnimation();
    }

    /**
     * Loads the background image, stretches it to this node's size and adds
     * it as a child. Load failures are logged and otherwise ignored.
     */
    private void initBackgroundAnimation() {
        try {
            Image background = new Image(getClass().getClassLoader().getResourceAsStream(BG_PATH));
            ImageView view = new ImageView(background);
            view.setFitWidth(getWidth());
            view.setFitHeight(getHeight());
            getChildren().add(view);
        } catch (IllegalArgumentException | NullPointerException e) {
            e.printStackTrace();
        }
    }

    /**
     * Creates the particle animation.
     * <b>NOTE:</b> to be implemented.
     */
    private void initParticleAnimation() {}

    /**
     * A single particle of the (future) background animation; a bunch of
     * respawning particles will be used for visual effects.
     *
     * <b>NOTE:</b> to be implemented.
     */
    private static class Particle extends Rectangle {
    }
}
| 28.666667
| 111
| 0.665962
|
3d2ff61fc30323bbb15c45289b9cf742ed6bfa08
| 2,836
|
package org.sunbird.common.request.orgvalidator;
import org.sunbird.common.exception.ProjectCommonException;
import org.sunbird.common.models.util.JsonKey;
import org.sunbird.common.request.BaseRequestValidator;
import org.sunbird.common.request.Request;
import org.sunbird.common.responsecode.ResponseCode;
import java.text.MessageFormat;
import java.util.List;
/**
 * Validates requests handled by the OrgAssignKeys controller.
 * @author anmolgupta
 */
public class KeyManagementValidator extends BaseRequestValidator {

    private Request request;

    private KeyManagementValidator(Request request) {
        this.request = request;
    }

    /**
     * Factory method; use instead of the (private) constructor.
     * @param request
     * @return
     */
    public static KeyManagementValidator getInstance(Request request) {
        return new KeyManagementValidator(request);
    }

    /**
     * Runs all validations for the OrgAssignKeysController request.
     */
    public void validate() {
        id();
        signKeys();
        encKeys();
    }

    private void id() {
        validateParam(
                (String) request.getRequest().get(JsonKey.ID),
                ResponseCode.mandatoryParamsMissing,
                JsonKey.ID);
    }

    private void signKeys() {
        checkKeyParam(JsonKey.SIGN_KEYS);
    }

    private void encKeys() {
        checkKeyParam(JsonKey.ENC_KEYS);
    }

    // Shared validation pipeline: presence -> list type -> non-empty.
    private void checkKeyParam(String key) {
        validateKeyPresence(key);
        validateListTypeObject(key);
        validateSize(key);
    }

    private void validateListTypeObject(String key) {
        if (request.get(key) instanceof List) {
            return;
        }
        throw new ProjectCommonException(
                ResponseCode.dataTypeError.getErrorCode(),
                MessageFormat.format(
                        ResponseCode.dataTypeError.getErrorMessage(), key, "List"),
                ResponseCode.CLIENT_ERROR.getResponseCode());
    }

    private void validateKeyPresence(String key) {
        if (request.getRequest().containsKey(key)) {
            return;
        }
        throw new ProjectCommonException(
                ResponseCode.mandatoryParamsMissing.getErrorCode(),
                ResponseCode.mandatoryParamsMissing.getErrorMessage(),
                ResponseCode.CLIENT_ERROR.getResponseCode(), key);
    }

    private void validateSize(String key) {
        if (!((List) request.get(key)).isEmpty()) {
            return;
        }
        throw new ProjectCommonException(
                ResponseCode.errorMandatoryParamsEmpty.getErrorCode(),
                ResponseCode.errorMandatoryParamsEmpty.getErrorMessage(),
                ResponseCode.CLIENT_ERROR.getResponseCode(), key);
    }
}
| 30.494624
| 87
| 0.650564
|
944e54c42efe9b4a1a538e820835a621bdfea089
| 5,268
|
/*
* ******************************************************************************
* * Copyright (c) 2021 Eclipse RDF4J contributors.
* * All rights reserved. This program and the accompanying materials
* * are made available under the terms of the Eclipse Distribution License v1.0
* * which accompanies this distribution, and is available at
* * http://www.eclipse.org/org/documents/edl-v10.php.
* ******************************************************************************
*/
package org.eclipse.rdf4j.sparqlbuilder.constraint.propertypath.builder;
import static org.eclipse.rdf4j.sparqlbuilder.constraint.propertypath.builder.PropertyPaths.p;
import static org.eclipse.rdf4j.sparqlbuilder.constraint.propertypath.builder.PropertyPaths.pAlt;
import static org.eclipse.rdf4j.sparqlbuilder.constraint.propertypath.builder.PropertyPaths.pGroup;
import static org.eclipse.rdf4j.sparqlbuilder.constraint.propertypath.builder.PropertyPaths.pOneOrMore;
import static org.eclipse.rdf4j.sparqlbuilder.constraint.propertypath.builder.PropertyPaths.pSeq;
import static org.eclipse.rdf4j.sparqlbuilder.constraint.propertypath.builder.PropertyPaths.pZeroOrMore;
import static org.eclipse.rdf4j.sparqlbuilder.constraint.propertypath.builder.PropertyPaths.pZeroOrOne;
import static org.eclipse.rdf4j.sparqlbuilder.rdf.Rdf.iri;
import java.util.Objects;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.sparqlbuilder.constraint.Expressions;
import org.eclipse.rdf4j.sparqlbuilder.constraint.propertypath.GroupedPath;
import org.eclipse.rdf4j.sparqlbuilder.constraint.propertypath.InversePath;
import org.eclipse.rdf4j.sparqlbuilder.constraint.propertypath.PropertyPath;
import org.eclipse.rdf4j.sparqlbuilder.rdf.Iri;
/**
* @since 4.0.0
* @author Florian Kleedorfer
*/
public class PropertyPathBuilder {
private PropertyPath head;
private PropertyPathBuilder() {
this.head = null;
}
PropertyPathBuilder(Iri predicate) {
this.head = p(predicate);
}
PropertyPathBuilder(IRI predicate) {
this(iri(predicate));
}
public static PropertyPathBuilder of(Iri predicate) {
return new PropertyPathBuilder(predicate);
}
public static PropertyPathBuilder of(IRI predicate) {
return new PropertyPathBuilder(predicate);
}
/**
* Build the path.
*
* @return
*/
public PropertyPath build() {
return head;
}
/**
* Invert whatever comes next (i.e. append <code>^</code>.
*/
public PropertyPathBuilder inv() {
Objects.requireNonNull(head);
head = new InversePath(groupIfNotGrouped(head));
return this;
}
private PropertyPath groupIfNotGrouped(PropertyPath path) {
if (head instanceof GroupedPath) {
return path;
}
return new GroupedPath(path);
}
/**
* Append <code>`/` predicate</code> to the path.
*/
public PropertyPathBuilder then(Iri predicate) {
return then(p(predicate));
}
/**
* Append <code>`/` path</code> to the path.
*/
public PropertyPathBuilder then(IRI predicate) {
return then(iri(predicate));
}
/**
* Append <code>`/` path</code> to the path.
*/
public PropertyPathBuilder then(PropertyPath path) {
Objects.requireNonNull(head);
head = pSeq(head, path);
return this;
}
/**
* Append <code>`/`</code> and the product of the <code>subtreeBuilder</code> to the path.
*/
public PropertyPathBuilder then(Consumer<EmptyPropertyPathBuilder> subtreeBuilder) {
return withSubtree(subtreeBuilder, PropertyPaths::pSeq);
}
private PropertyPathBuilder withSubtree(
Consumer<EmptyPropertyPathBuilder> subtreeBuilder,
BiFunction<PropertyPath, PropertyPath, PropertyPath> assembler) {
Objects.requireNonNull(head);
EmptyPropertyPathBuilder b = new EmptyPropertyPathBuilder();
subtreeBuilder.accept(b);
head = assembler.apply(head, b.build());
return this;
}
/**
* Append <code>`|` predicate</code> to the path.
*/
public PropertyPathBuilder or(Iri predicate) {
return or(p(predicate));
}
/**
* Append <code>`|` path</code> to the path.
*/
public PropertyPathBuilder or(IRI predicate) {
return or(iri(predicate));
}
/**
* Append <code>`|` path</code> to the path.
*/
public PropertyPathBuilder or(PropertyPath path) {
Objects.requireNonNull(head);
head = pAlt(head, path);
return this;
}
/**
* Append <code>`|`</code> and the product of the <code>subtreeBuilder</code> to the path.
*/
public PropertyPathBuilder or(Consumer<EmptyPropertyPathBuilder> subtreeBuilder) {
return withSubtree(subtreeBuilder, PropertyPaths::pAlt);
}
/**
* Append <code>`*`</code> to the path.
*/
public PropertyPathBuilder zeroOrMore() {
Objects.requireNonNull(head);
head = pZeroOrMore(head);
return this;
}
/**
* Append <code>`+`</code> to the path.
*/
public PropertyPathBuilder oneOrMore() {
Objects.requireNonNull(head);
head = pOneOrMore(head);
return this;
}
/**
* Append <code>`?`</code> to the path.
*/
public PropertyPathBuilder zeroOrOne() {
Objects.requireNonNull(head);
head = pZeroOrOne(head);
return this;
}
/**
* Enclose the path with <code>`(` and `)`</code>.
*/
public PropertyPathBuilder group() {
Objects.requireNonNull(head);
head = pGroup(head);
return this;
}
}
| 27.4375
| 104
| 0.713174
|
b83ff98d2e2b5d014fef2c91768a367b77c56bb9
| 12,899
|
package ru.lanbilling.webservice.wsdl;
import javax.annotation.Generated;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for soapGenOrderData complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="soapGenOrderData">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="oper" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="uid" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="grp" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="ugrp" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="agrmid" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="docid" type="{http://www.w3.org/2001/XMLSchema}long"/>
* <element name="period" type="{http://www.w3.org/2001/XMLSchema}long"/>
* <element name="num" type="{http://www.w3.org/2001/XMLSchema}long"/>
* <element name="date" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="groupcnt" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="groupidx" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="summ" type="{http://www.w3.org/2001/XMLSchema}double" minOccurs="0"/>
* <element name="comment" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "soapGenOrderData", propOrder = {
    "oper",
    "uid",
    "grp",
    "ugrp",
    "agrmid",
    "docid",
    "period",
    "num",
    "date",
    "groupcnt",
    "groupidx",
    "summ",
    "comment"
})
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
// NOTE(review): JAXB-generated class (see @Generated). Do not edit by hand;
// regenerate from the WSDL/schema so local changes are not lost.
public class SoapGenOrderData {

    @XmlElement(defaultValue = "0")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    protected Long oper;
    @XmlElement(defaultValue = "0")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    protected Long uid;
    @XmlElement(defaultValue = "0")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    protected Long grp;
    @XmlElement(defaultValue = "0")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    protected Long ugrp;
    @XmlElement(defaultValue = "0")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    protected Long agrmid;
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    protected long docid;
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    protected long period;
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    protected long num;
    @XmlElement(defaultValue = "0")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    protected Long date;
    @XmlElement(defaultValue = "0")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    protected Long groupcnt;
    @XmlElement(defaultValue = "0")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    protected Long groupidx;
    @XmlElement(defaultValue = "0.000000")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    protected Double summ;
    @XmlElement(defaultValue = "")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    protected String comment;

    /**
     * Gets the value of the oper property.
     *
     * @return
     *     possible object is
     *     {@link Long }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public Long getOper() {
        return oper;
    }

    /**
     * Sets the value of the oper property.
     *
     * @param value
     *     allowed object is
     *     {@link Long }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public void setOper(Long value) {
        this.oper = value;
    }

    /**
     * Gets the value of the uid property.
     *
     * @return
     *     possible object is
     *     {@link Long }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public Long getUid() {
        return uid;
    }

    /**
     * Sets the value of the uid property.
     *
     * @param value
     *     allowed object is
     *     {@link Long }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public void setUid(Long value) {
        this.uid = value;
    }

    /**
     * Gets the value of the grp property.
     *
     * @return
     *     possible object is
     *     {@link Long }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public Long getGrp() {
        return grp;
    }

    /**
     * Sets the value of the grp property.
     *
     * @param value
     *     allowed object is
     *     {@link Long }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public void setGrp(Long value) {
        this.grp = value;
    }

    /**
     * Gets the value of the ugrp property.
     *
     * @return
     *     possible object is
     *     {@link Long }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public Long getUgrp() {
        return ugrp;
    }

    /**
     * Sets the value of the ugrp property.
     *
     * @param value
     *     allowed object is
     *     {@link Long }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public void setUgrp(Long value) {
        this.ugrp = value;
    }

    /**
     * Gets the value of the agrmid property.
     *
     * @return
     *     possible object is
     *     {@link Long }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public Long getAgrmid() {
        return agrmid;
    }

    /**
     * Sets the value of the agrmid property.
     *
     * @param value
     *     allowed object is
     *     {@link Long }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public void setAgrmid(Long value) {
        this.agrmid = value;
    }

    /**
     * Gets the value of the docid property.
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public long getDocid() {
        return docid;
    }

    /**
     * Sets the value of the docid property.
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public void setDocid(long value) {
        this.docid = value;
    }

    /**
     * Gets the value of the period property.
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public long getPeriod() {
        return period;
    }

    /**
     * Sets the value of the period property.
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public void setPeriod(long value) {
        this.period = value;
    }

    /**
     * Gets the value of the num property.
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public long getNum() {
        return num;
    }

    /**
     * Sets the value of the num property.
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public void setNum(long value) {
        this.num = value;
    }

    /**
     * Gets the value of the date property.
     *
     * @return
     *     possible object is
     *     {@link Long }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public Long getDate() {
        return date;
    }

    /**
     * Sets the value of the date property.
     *
     * @param value
     *     allowed object is
     *     {@link Long }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public void setDate(Long value) {
        this.date = value;
    }

    /**
     * Gets the value of the groupcnt property.
     *
     * @return
     *     possible object is
     *     {@link Long }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public Long getGroupcnt() {
        return groupcnt;
    }

    /**
     * Sets the value of the groupcnt property.
     *
     * @param value
     *     allowed object is
     *     {@link Long }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public void setGroupcnt(Long value) {
        this.groupcnt = value;
    }

    /**
     * Gets the value of the groupidx property.
     *
     * @return
     *     possible object is
     *     {@link Long }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public Long getGroupidx() {
        return groupidx;
    }

    /**
     * Sets the value of the groupidx property.
     *
     * @param value
     *     allowed object is
     *     {@link Long }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public void setGroupidx(Long value) {
        this.groupidx = value;
    }

    /**
     * Gets the value of the summ property.
     *
     * @return
     *     possible object is
     *     {@link Double }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public Double getSumm() {
        return summ;
    }

    /**
     * Sets the value of the summ property.
     *
     * @param value
     *     allowed object is
     *     {@link Double }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public void setSumm(Double value) {
        this.summ = value;
    }

    /**
     * Gets the value of the comment property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public String getComment() {
        return comment;
    }

    /**
     * Sets the value of the comment property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
    public void setComment(String value) {
        this.comment = value;
    }
}
| 31.232446
| 116
| 0.567718
|
a303c1578926903cb71089de0ea569d0bc075712
| 8,860
|
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
public class Tools {

    /**
     * Cosine similarity between the embedding vectors of words {@code a} and {@code b},
     * looked up in {@link JointModel#wordVec}.
     *
     * @return the cosine of the two vectors, or -0.5 as a sentinel when either word
     *         has no embedding (original FIXME kept)
     */
    static double dot(String a, String b) {
        ArrayList<Double> vec1 = JointModel.wordVec.get(a);
        ArrayList<Double> vec2 = JointModel.wordVec.get(b);
        if (vec1 == null || vec2 == null)
            return -0.5; // FIXME - make sure not using just plain DOT
        double sum = 0.;
        double norm1 = 0, norm2 = 0;
        for (int i = 0; i < vec1.size(); i++) {
            sum += vec1.get(i) * vec2.get(i);
            norm1 += Math.pow(vec1.get(i), 2);
            norm2 += Math.pow(vec2.get(i), 2);
        }
        // divide() maps x/0 to 0, so zero-norm vectors yield similarity 0.
        return divide(sum, (Math.sqrt(norm1) * Math.sqrt(norm2)));
    }

    /**
     * Returns the index of {@code feature}, registering the feature (and seeding its
     * coarse probability from the matching probability table) the first time it is seen.
     * In test mode ({@code JointModel.TEST}) unknown features are never created and -1
     * is returned instead.
     *
     * @param feature     feature name, possibly of the form "TAG|..." for tag-dependent features
     * @param decideAffix one of "suffix", "prefix", "stopSuffix", "stopPrefix" (or anything
     *                    else for the generic fallback); selects which probability table seeds
     *                    the coarse probability
     * @return the feature's index, or -1 when unknown during testing
     */
    static int getFeatureIndex(String feature, String decideAffix) {
        if (!JointModel.feature2Index.containsKey(feature)) {
            if (JointModel.TEST)
                return -1; // if in testing phase, and feature does not exist already, do not create new
            int index = JointModel.feature2Index.size();
            JointModel.feature2Index.put(feature, index);
            JointModel.index2Feature.add(feature);
            if(JointModel.generalEmissionProbabilities.containsKey(feature))
                JointModel.coarseProbabilities.put(feature, JointModel.generalEmissionProbabilities.get(feature));
            else if(JointModel.suffixProbabilities.containsKey(feature) && decideAffix.equals("suffix"))
                JointModel.coarseProbabilities.put(feature, JointModel.suffixProbabilities.get(feature));
            else if(JointModel.prefixProbabilities.containsKey(feature) && decideAffix.equals("prefix"))
                JointModel.coarseProbabilities.put(feature, JointModel.prefixProbabilities.get(feature));
            else if(decideAffix.equals("stopSuffix")) {
                // "TAG|STP_E_affix" borrows the probability of the corresponding "TAG|SUFFIX_affix".
                String[] parts = feature.split("\\|STP_E_");
                String tag = parts[0];
                String affix = parts[1];
                String suffixFeature = tag + "|SUFFIX_" + affix;
                if(JointModel.suffixProbabilities.containsKey(suffixFeature)) {
                    JointModel.coarseProbabilities.put(feature, JointModel.suffixProbabilities.get(suffixFeature));
                }
                else {
                    JointModel.coarseProbabilities.put(feature, 0.00000000001);
                }
            }
            else if(decideAffix.equals("stopPrefix")) {
                // "TAG|STP_B_affix" borrows the probability of the corresponding "TAG|PREFIX_affix".
                String[] parts = feature.split("\\|STP_B_");
                String tag = parts[0];
                String affix = parts[1];
                String prefixFeature = tag + "|PREFIX_" + affix;
                if(JointModel.prefixProbabilities.containsKey(prefixFeature)) {
                    JointModel.coarseProbabilities.put(feature, JointModel.prefixProbabilities.get(prefixFeature));
                }
                else {
                    JointModel.coarseProbabilities.put(feature, 0.00000000001);
                }
            }
            else {
                // Tiny floor probability for otherwise unseen features.
                JointModel.coarseProbabilities.put(feature, 0.00000000001);
            }
            return index;
        }
        return JointModel.feature2Index.get(feature);
    }

    /**
     * Adds {@code newFeature} (with the given value) to the sparse feature map.
     * When {@code decide} is "tagDependent", one "TAG|feature" entry is added per
     * known tag instead of the bare feature.
     */
    static void addFeature(HashMap<Integer, Double> features, String newFeature, double value, String decide, String decideAffix) {
        int featureIndex;
        if (decide.equals("tagDependent")) {
            for (String tagDependentFeature : returnTagDependentFeatures(newFeature)) {
                featureIndex = getFeatureIndex(tagDependentFeature, decideAffix);
                if (featureIndex != -1)
                    features.put(featureIndex, value);
            }
        } else {
            featureIndex = getFeatureIndex(newFeature, decideAffix);
            if (featureIndex != -1)
                features.put(featureIndex, value);
        }
    }

    /** Expands a bare feature into one "TAG|feature" string per tag in {@code JointModel.tagList}. */
    static ArrayList<String> returnTagDependentFeatures(String feature) {
        ArrayList<String> tagDependentFeatures = new ArrayList<>();
        for (String tag : JointModel.tagList) {
            String newFeature = tag + "|" + feature;
            tagDependentFeatures.add(newFeature);
        }
        return tagDependentFeatures;
    }

    /** Debug helper: prints the array space-separated on one stdout line. */
    protected static void printDoubleArray(double[] array) {
        for (double d : array) {
            System.out.print(d + " ");
            // Main.pw.print(d + " ");
        }
        System.out.println();
        // Main.pw.println();
    }

    /**
     * Maximum element of {@code values}.
     * Returns {@code -Double.MAX_VALUE} for an empty array (callers rely on this
     * sentinel, so it is kept).
     */
    public static double max(double[] values) {
        double max = -Double.MAX_VALUE;
        for (double value : values) {
            if (value > max)
                max = value;
        }
        return max;
    }

    /**
     * Numerically stable log-sum-exp: log(sum_i exp(x_i)), computed by factoring
     * out the maximum. -Infinity entries are skipped (exp would be 0 anyway).
     */
    public static double logSumOfExponentials(double[] xs) {
        if (xs.length == 1)
            return xs[0];
        double max = max(xs);
        double sum = 0.0;
        for (double x : xs)
            if (x != Double.NEGATIVE_INFINITY)
                sum += Math.exp(x - max);
        return max + java.lang.Math.log(sum);
    }

    /** List overload of {@link #logSumOfExponentials(double[])}. */
    public static double logSumOfExponentials(ArrayList<Double> x) {
        double[] xs = new double[x.size()];
        for (int i = 0; i < x.size(); i++)
            xs[i] = x.get(i);
        return logSumOfExponentials(xs);
    }

    /**
     * Dot product of a sparse feature vector with the global coarse-probability weights.
     * NOTE(review): the guard {@code i < coarseProbabilities.size()} compares a feature
     * index against a map's size; it presumably works because indices are assigned
     * densely in registration order — confirm before changing.
     */
    static double featureWeightProduct(HashMap<Integer, Double> features) {
        double sum = 0.;
        if (features == null || features.size() == 0)
            return 0.;
        for (int i : features.keySet())
            if (i < JointModel.coarseProbabilities.size()) {// check if weight exists for the feature
                sum += features.get(i) * JointModel.coarseProbabilities.get(JointModel.index2Feature.get(i));
            }
        return sum;
    }

    /**
     * Like {@link #featureWeightProduct(HashMap)}, but only counts features that are
     * either dependent on the given {@code tag} ("TAG|...") or tag-independent
     * (no '|' in the name).
     */
    static double featureWeightProductWithTag(HashMap<Integer, Double> features, String tag) {
        double sum = 0.;
        if (features == null || features.size() == 0)
            return 0.;
        for (int i : features.keySet()) {
            String feature = JointModel.index2Feature.get(i);
            if (feature.startsWith(tag + "|")) { // Tag dependent
                if (i < JointModel.coarseProbabilities.size()) { // check if weight exists for the feature
                    sum += features.get(i) * JointModel.coarseProbabilities.get(feature);
                }
            }
            else if(!feature.contains("|")) { // Not dependent
                if (i < JointModel.coarseProbabilities.size()) { // check if weight exists for the feature
                    sum += features.get(i) * JointModel.coarseProbabilities.get(feature);
                }
            }
        }
        return sum;
    }

    /** divides two doubles. (0 / 0 = 0!) && (1 / 0 = 0!) */
    public static double divide(double n, double d) {
        if (n == 0 || d == 0)
            return 0;
        else
            return n / d;
    }

    /** Sum of all values in the collection (0.0 for an empty collection). */
    public static double sumDoubleListValues(Collection<Double> list) {
        double sum = 0.0;
        for (double d : list) {
            sum += d;
        }
        return sum;
    }

    /**
     * For every affix, ranks all other affixes by how much their stem sets overlap.
     * correlation(i, j) = |stems(i) ∩ stems(j)| / |stems(i)|.
     *
     * @param affixes the affixes to compare
     * @param type    's' to match suffixes (word endsWith), 'p' to match prefixes (word startsWith)
     * @return affix -> (neighbor affix -> correlation), neighbors sorted by descending score
     * @throws IOException declared for interface compatibility (nothing here throws it)
     */
    static HashMap<String, Map<String, Double>> computeAffixCorrelation(LinkedHashSet<String> affixes, char type)
            throws IOException {
        System.out.print("Computing affix correlation - " + type + " ...");
        String[] affixArray = new String[affixes.size()];
        affixArray = affixes.toArray(affixArray);
        double[][] correlationMatrix = new double[affixArray.length][affixArray.length];
        // Map every affix to the set of stems it attaches to in the corpus.
        HashMap<String, HashSet<String>> affix2Word = new HashMap<String, HashSet<String>>();
        for (String affix : affixArray) {
            affix2Word.put(affix, new HashSet<String>());
            for (String word : JointModel.word2Cnt.keySet())
                if (type == 's' && word.endsWith(affix))
                    affix2Word.get(affix).add(word.substring(0, word.length() - affix.length()));
                else if (type == 'p' && word.startsWith(affix))
                    affix2Word.get(affix).add(word.substring(affix.length()));
        }
        HashMap<String, Map<String, Double>> affixNeighbor = new HashMap<String, Map<String, Double>>();
        for (int i = 0; i < affixArray.length; i++) {
            HashMap<String, Double> neighbor2Score = new HashMap<String, Double>();
            for (int j = 0; j < affixArray.length; j++)
                if (i != j) {
                    HashSet<String> tmp = clone(affix2Word.get(affixArray[i]));
                    tmp.retainAll(affix2Word.get(affixArray[j]));
                    // NOTE(review): division yields NaN if affix i matched no word — TODO confirm inputs.
                    correlationMatrix[i][j] = ((double) tmp.size()) / affix2Word.get(affixArray[i]).size();
                    neighbor2Score.put(affixArray[j], correlationMatrix[i][j]);
                }
            affixNeighbor.put(affixArray[i], Tools.sortByValue(neighbor2Score));
        }
        System.out.println("done.");
        return affixNeighbor;
    }

    /** Returns a copy of {@code map} sorted by descending value (insertion-ordered LinkedHashMap). */
    public static <K, V extends Comparable<? super V>> Map<K, V> sortByValue(Map<K, V> map) {
        List<Map.Entry<K, V>> list = new LinkedList<Map.Entry<K, V>>(map.entrySet());
        Collections.sort(list, new Comparator<Map.Entry<K, V>>() {
            public int compare(Map.Entry<K, V> o1, Map.Entry<K, V> o2) {
                return -(o1.getValue()).compareTo(o2.getValue()); // change sign to make ascending
            }
        });
        Map<K, V> result = new LinkedHashMap<K, V>();
        for (Map.Entry<K, V> entry : list) {
            result.put(entry.getKey(), entry.getValue());
        }
        return result;
    }

    /** Shallow copy of a string set (copy constructor replaces the original hand-rolled loop). */
    static HashSet<String> clone(HashSet<String> map) {
        return new HashSet<String>(map);
    }

    /** Increments the count stored under {@code key}, starting from 1.0 when absent. */
    static void incrementMap(Map<String, Double> map, String key) {
        Double value = map.get(key);
        if (value == null)
            map.put(key, 1.);
        else
            map.put(key, value + 1);
    }
}
| 32.101449
| 128
| 0.67912
|
76ae528ed3ffd92e8e3119d785828b62a29d80e5
| 844
|
/*
* Copyright 2007 The JA-SIG Collaborative. All rights reserved. See license
* distributed with this file and available online at
* http://www.uportal.org/license.html
*/
package org.jasig.cas.authentication.principal;
/**
 * Generates a unique consistent Id based on the principal, a service, and some
 * algorithm.
 *
 * @author Scott Battaglia
 * @version $Revision: 1.1 $ $Date: 2007/04/20 19:39:31 $
 * @since 3.1
 */
public interface PersistentIdGenerator {
    /**
     * Generates a PersistentId based on some algorithm plus the principal and
     * service. Per the interface contract above, the id is expected to be
     * consistent: the same (principal, service) pair yields the same id.
     *
     * @param principal the principal to generate the id for.
     * @param service the service to generate the id for.
     * @return the generated persistent id.
     */
    String generate(Principal principal, Service service);
}
| 30.142857
| 80
| 0.675355
|
a38f77ae74ca0acc58b9b7f325cb4eee5cbf84e0
| 26,367
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.infrastructure.bulkimport.populator.client;
import org.apache.fineract.infrastructure.bulkimport.constants.ClientPersonConstants;
import org.apache.fineract.infrastructure.bulkimport.constants.TemplatePopulateImportConstants;
import org.apache.fineract.infrastructure.bulkimport.populator.AbstractWorkbookPopulator;
import org.apache.fineract.infrastructure.bulkimport.populator.OfficeSheetPopulator;
import org.apache.fineract.infrastructure.bulkimport.populator.PersonnelSheetPopulator;
import org.apache.fineract.infrastructure.codes.data.CodeValueData;
import org.apache.fineract.organisation.office.data.OfficeData;
import org.apache.poi.hssf.usermodel.HSSFDataValidationHelper;
import org.apache.poi.hssf.usermodel.HSSFSheet;
import org.apache.poi.ss.SpreadsheetVersion;
import org.apache.poi.ss.usermodel.*;
import org.apache.poi.ss.util.CellRangeAddressList;
import java.util.List;
public class ClientPersonWorkbookPopulator extends AbstractWorkbookPopulator {
    // Populators for the two reference sheets (offices, personnel) this template depends on.
    private OfficeSheetPopulator officeSheetPopulator;
    private PersonnelSheetPopulator personnelSheetPopulator;
    // Code values backing the drop-down lookup columns of the client-person sheet.
    private List<CodeValueData>clientTypeCodeValues;
    private List<CodeValueData>genderCodeValues;
    private List<CodeValueData>clientClassificationCodeValues;
    private List<CodeValueData>addressTypesCodeValues;
    private List<CodeValueData>stateProvinceCodeValues;
    private List<CodeValueData>countryCodeValues;
    private List<CodeValueData>residenceCodeValues;

    /**
     * Wires in the sheet populators and every code-value list needed to build the
     * client-person import template. The lists are stored as-is (no defensive copy).
     */
    public ClientPersonWorkbookPopulator(OfficeSheetPopulator officeSheetPopulator,
            PersonnelSheetPopulator personnelSheetPopulator,List<CodeValueData>clientTypeCodeValues,
            List<CodeValueData>genderCodeValues, List<CodeValueData>clientClassification,List<CodeValueData>addressTypesCodeValues,
            List<CodeValueData>stateProvinceCodeValues,List<CodeValueData>countryCodeValues, List<CodeValueData> residenceCodeValues ) {
        this.officeSheetPopulator = officeSheetPopulator;
        this.personnelSheetPopulator = personnelSheetPopulator;
        this.clientTypeCodeValues=clientTypeCodeValues;
        this.genderCodeValues=genderCodeValues;
        this.clientClassificationCodeValues=clientClassification;
        this.addressTypesCodeValues=addressTypesCodeValues;
        this.stateProvinceCodeValues=stateProvinceCodeValues;
        this.countryCodeValues=countryCodeValues;
        this.residenceCodeValues=residenceCodeValues;
    }
    /**
     * Builds the client-person template: creates the main sheet, populates the
     * personnel and office reference sheets, then lays out headers, lookup data
     * and data-validation rules. Call order matters — the office sheet must exist
     * before setRules builds formulas that reference it.
     */
    @Override
    public void populate(Workbook workbook,String dateFormat) {
        Sheet clientSheet = workbook.createSheet(TemplatePopulateImportConstants.CLIENT_PERSON_SHEET_NAME);
        personnelSheetPopulator.populate(workbook,dateFormat);
        officeSheetPopulator.populate(workbook,dateFormat);
        setLayout(clientSheet);
        setOfficeDateLookupTable(clientSheet, officeSheetPopulator.getOffices(),
                ClientPersonConstants.RELATIONAL_OFFICE_NAME_COL, ClientPersonConstants.RELATIONAL_OFFICE_OPENING_DATE_COL,dateFormat);
        setClientDataLookupTable(clientSheet);
        setRules(clientSheet,dateFormat);
    }
private void setClientDataLookupTable(Sheet clientSheet) {
int rowIndex=0;
for (CodeValueData clientTypeCodeValue:clientTypeCodeValues) {
Row row =clientSheet.getRow(++rowIndex);
if(row==null)
row=clientSheet.createRow(rowIndex);
writeString(ClientPersonConstants.LOOKUP_CLIENT_TYPES_COL,row,clientTypeCodeValue.getName()+
"-"+clientTypeCodeValue.getId());
}
rowIndex=0;
for (CodeValueData clientClassificationCodeValue:clientClassificationCodeValues) {
Row row =clientSheet.getRow(++rowIndex);
if(row==null)
row=clientSheet.createRow(rowIndex);
writeString(ClientPersonConstants.LOOKUP_CLIENT_CLASSIFICATION_COL,row,
clientClassificationCodeValue.getName()+"-"+clientClassificationCodeValue.getId());
}
rowIndex=0;
for (CodeValueData genderCodeValue:genderCodeValues) {
Row row =clientSheet.getRow(++rowIndex);
if(row==null)
row=clientSheet.createRow(rowIndex);
writeString(ClientPersonConstants.LOOKUP_GENDER_COL,row,genderCodeValue.getName()+"-"+genderCodeValue.getId());
}
rowIndex=0;
for (CodeValueData addressTypeCodeValue:addressTypesCodeValues) {
Row row =clientSheet.getRow(++rowIndex);
if(row==null)
row=clientSheet.createRow(rowIndex);
writeString(ClientPersonConstants.LOOKUP_ADDRESS_TYPE_COL,row,
addressTypeCodeValue.getName()+"-"+addressTypeCodeValue.getId());
}
rowIndex=0;
for (CodeValueData stateCodeValue:stateProvinceCodeValues) {
Row row =clientSheet.getRow(++rowIndex);
if(row==null)
row=clientSheet.createRow(rowIndex);
writeString(ClientPersonConstants.LOOKUP_STATE_PROVINCE_COL,row,stateCodeValue.getName()+"-"+stateCodeValue.getId());
}
rowIndex=0;
for (CodeValueData countryCodeValue: countryCodeValues) {
Row row =clientSheet.getRow(++rowIndex);
if(row==null)
row=clientSheet.createRow(rowIndex);
writeString(ClientPersonConstants.LOOKUP_COUNTRY_COL,row,countryCodeValue.getName()+"-"+countryCodeValue.getId());
}
rowIndex=0;
for (CodeValueData residenceTypeCodeValue: residenceCodeValues) {
Row row = clientSheet.getRow(++rowIndex);
if(row == null)
row = clientSheet.createRow(rowIndex);
writeString(ClientPersonConstants.LOOKUP_RESIDENCE_TYPE_COL,row,residenceTypeCodeValue.getName()+"-"+residenceTypeCodeValue.getId());
}
}
    /**
     * Creates the header row and sets the width and caption of every column,
     * including the hidden lookup columns used by the data-validation formulas.
     */
    private void setLayout(Sheet worksheet) {
        Row rowHeader = worksheet.createRow(TemplatePopulateImportConstants.ROWHEADER_INDEX);
        rowHeader.setHeight(TemplatePopulateImportConstants.ROW_HEADER_HEIGHT);
        // Name columns.
        worksheet.setColumnWidth(ClientPersonConstants.FIRST_NAME_COL, TemplatePopulateImportConstants.MEDIUM_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.LAST_NAME_COL, TemplatePopulateImportConstants.MEDIUM_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.MIDDLE_NAME_COL, TemplatePopulateImportConstants.MEDIUM_COL_SIZE);
        writeString(ClientPersonConstants.FIRST_NAME_COL, rowHeader, "First Name*");
        writeString(ClientPersonConstants.LAST_NAME_COL, rowHeader, "Last Name*");
        writeString(ClientPersonConstants.MIDDLE_NAME_COL, rowHeader, "Middle Name");
        // Widths for the remaining data-entry, address, and lookup columns.
        worksheet.setColumnWidth(ClientPersonConstants.OFFICE_NAME_COL, TemplatePopulateImportConstants.MEDIUM_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.STAFF_NAME_COL, TemplatePopulateImportConstants.MEDIUM_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.EXTERNAL_ID_COL, TemplatePopulateImportConstants.SMALL_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.SUBMITTED_ON_COL,TemplatePopulateImportConstants.SMALL_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.ACTIVATION_DATE_COL, TemplatePopulateImportConstants.SMALL_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.ACTIVE_COL, TemplatePopulateImportConstants.SMALL_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.MOBILE_NO_COL, TemplatePopulateImportConstants.MEDIUM_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.VOTER_ID_COL, TemplatePopulateImportConstants.MEDIUM_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.RATION_CARD_COL, TemplatePopulateImportConstants.MEDIUM_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.DOB_COL, TemplatePopulateImportConstants.SMALL_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.CLIENT_TYPE_COL, TemplatePopulateImportConstants.SMALL_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.GENDER_COL, TemplatePopulateImportConstants.SMALL_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.CLIENT_CLASSIFICATION_COL, TemplatePopulateImportConstants.MEDIUM_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.IS_STAFF_COL, TemplatePopulateImportConstants.SMALL_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.ADDRESS_ENABLED_COL,TemplatePopulateImportConstants.SMALL_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.ADDRESS_TYPE_COL, TemplatePopulateImportConstants.SMALL_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.STREET_COL, TemplatePopulateImportConstants.SMALL_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.ADDRESS_LINE_1_COL, TemplatePopulateImportConstants.MEDIUM_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.ADDRESS_LINE_2_COL, TemplatePopulateImportConstants.MEDIUM_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.ADDRESS_LINE_3_COL, TemplatePopulateImportConstants.MEDIUM_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.CITY_COL, TemplatePopulateImportConstants.SMALL_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.STATE_PROVINCE_COL, TemplatePopulateImportConstants.SMALL_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.COUNTRY_COL, TemplatePopulateImportConstants.SMALL_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.RESIDENCE_TYPE_COL, TemplatePopulateImportConstants.SMALL_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.POSTAL_CODE_COL, TemplatePopulateImportConstants.SMALL_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.IS_ACTIVE_ADDRESS_COL, TemplatePopulateImportConstants.SMALL_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.WARNING_COL,TemplatePopulateImportConstants.MEDIUM_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.RELATIONAL_OFFICE_NAME_COL, TemplatePopulateImportConstants.MEDIUM_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.RELATIONAL_OFFICE_OPENING_DATE_COL, TemplatePopulateImportConstants.SMALL_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.LOOKUP_GENDER_COL,TemplatePopulateImportConstants.SMALL_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.LOOKUP_CLIENT_TYPES_COL,TemplatePopulateImportConstants.SMALL_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.LOOKUP_CLIENT_CLASSIFICATION_COL,TemplatePopulateImportConstants.SMALL_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.LOOKUP_ADDRESS_TYPE_COL,TemplatePopulateImportConstants.SMALL_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.LOOKUP_STATE_PROVINCE_COL,TemplatePopulateImportConstants.SMALL_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.LOOKUP_COUNTRY_COL,TemplatePopulateImportConstants.SMALL_COL_SIZE);
        worksheet.setColumnWidth(ClientPersonConstants.LOOKUP_RESIDENCE_TYPE_COL, TemplatePopulateImportConstants.SMALL_COL_SIZE);
        // Header captions. NOTE(review): trailing spaces and the "memeber" typo are
        // deliberately left untouched — importers may match these strings verbatim,
        // so fixing them requires a coordinated change with the import side.
        writeString(ClientPersonConstants.OFFICE_NAME_COL, rowHeader, "Office Name*");
        writeString(ClientPersonConstants.STAFF_NAME_COL, rowHeader, "Staff Name");
        writeString(ClientPersonConstants.EXTERNAL_ID_COL, rowHeader, "Aadhar Card* ");
        writeString(ClientPersonConstants.SUBMITTED_ON_COL,rowHeader,"Submitted On Date");
        writeString(ClientPersonConstants.ACTIVATION_DATE_COL, rowHeader, "Activation date");
        writeString(ClientPersonConstants.ACTIVE_COL, rowHeader, "Active*");
        writeString(ClientPersonConstants.MOBILE_NO_COL, rowHeader, "Mobile number");
        writeString(ClientPersonConstants.VOTER_ID_COL,rowHeader, "Voter ID");
        writeString(ClientPersonConstants.RATION_CARD_COL, rowHeader, "Ration Card");
        writeString(ClientPersonConstants.DOB_COL, rowHeader, "Date of Birth ");
        writeString(ClientPersonConstants.CLIENT_TYPE_COL, rowHeader, "Client Type ");
        writeString(ClientPersonConstants.IS_STAFF_COL, rowHeader, "Is a staff memeber ");
        writeString(ClientPersonConstants.GENDER_COL, rowHeader, "Gender ");
        writeString(ClientPersonConstants.ADDRESS_ENABLED_COL,rowHeader,"Address Enabled *");
        writeString(ClientPersonConstants.CLIENT_CLASSIFICATION_COL, rowHeader, "Client Classification ");
        writeString(ClientPersonConstants.ADDRESS_TYPE_COL, rowHeader, "Address Type ");
        writeString(ClientPersonConstants.STREET_COL, rowHeader, "Street ");
        writeString(ClientPersonConstants.ADDRESS_LINE_1_COL, rowHeader, "Address Line 1");
        writeString(ClientPersonConstants.ADDRESS_LINE_2_COL, rowHeader, "Address Line 2");
        writeString(ClientPersonConstants.ADDRESS_LINE_3_COL, rowHeader, "Address Line 3 ");
        writeString(ClientPersonConstants.CITY_COL, rowHeader, "City ");
        writeString(ClientPersonConstants.STATE_PROVINCE_COL, rowHeader, "State/ Province ");
        writeString(ClientPersonConstants.COUNTRY_COL, rowHeader, "Country ");
        writeString(ClientPersonConstants.RESIDENCE_TYPE_COL, rowHeader, "Residence Type ");
        writeString(ClientPersonConstants.POSTAL_CODE_COL, rowHeader, "Postal Code ");
        writeString(ClientPersonConstants.IS_ACTIVE_ADDRESS_COL, rowHeader, "Is active Address ? ");
        writeString(ClientPersonConstants.WARNING_COL, rowHeader, "All * marked fields are compulsory.");
        writeString(ClientPersonConstants.RELATIONAL_OFFICE_NAME_COL, rowHeader, "Lookup office Name ");
        writeString(ClientPersonConstants.RELATIONAL_OFFICE_OPENING_DATE_COL, rowHeader, "Lookup Office Opened Date ");
        writeString(ClientPersonConstants.LOOKUP_GENDER_COL, rowHeader, "Lookup Gender ");
        writeString(ClientPersonConstants.LOOKUP_CLIENT_TYPES_COL, rowHeader, "Lookup Client Types ");
        writeString(ClientPersonConstants.LOOKUP_CLIENT_CLASSIFICATION_COL, rowHeader, "Lookup Client Classification ");
        writeString(ClientPersonConstants.LOOKUP_ADDRESS_TYPE_COL, rowHeader, "Lookup AddressType ");
        writeString(ClientPersonConstants.LOOKUP_STATE_PROVINCE_COL, rowHeader, "Lookup State/Province ");
        writeString(ClientPersonConstants.LOOKUP_COUNTRY_COL, rowHeader, "Lookup Country ");
        writeString(ClientPersonConstants.LOOKUP_RESIDENCE_TYPE_COL, rowHeader, "Lookup ResidenceType");
    }
    /**
     * Attaches data-validation (drop-downs and date constraints) to every
     * constrained column: each range spans row 1 to the last Excel-97 row for one
     * column; each constraint is either a named-range formula (names created by
     * setNames), a True/False list, or a date bound; helper formulas reference
     * the office sheet and the lookup columns by absolute address.
     */
    private void setRules(Sheet worksheet,String dateformat) {
        // One single-column range per validated field.
        CellRangeAddressList officeNameRange = new CellRangeAddressList(1,
                SpreadsheetVersion.EXCEL97.getLastRowIndex(), ClientPersonConstants.OFFICE_NAME_COL, ClientPersonConstants.OFFICE_NAME_COL);
        CellRangeAddressList staffNameRange = new CellRangeAddressList(1,
                SpreadsheetVersion.EXCEL97.getLastRowIndex(),ClientPersonConstants. STAFF_NAME_COL,ClientPersonConstants. STAFF_NAME_COL);
        CellRangeAddressList submittedOnDateRange = new CellRangeAddressList(1,
                SpreadsheetVersion.EXCEL97.getLastRowIndex(),ClientPersonConstants. SUBMITTED_ON_COL, ClientPersonConstants.SUBMITTED_ON_COL);
        CellRangeAddressList activationDateRange = new CellRangeAddressList(1,
                SpreadsheetVersion.EXCEL97.getLastRowIndex(), ClientPersonConstants.ACTIVATION_DATE_COL, ClientPersonConstants.ACTIVATION_DATE_COL);
        CellRangeAddressList activeRange = new CellRangeAddressList(1,
                SpreadsheetVersion.EXCEL97.getLastRowIndex(), ClientPersonConstants.ACTIVE_COL,ClientPersonConstants. ACTIVE_COL);
        CellRangeAddressList clientTypeRange=new CellRangeAddressList(1,
                SpreadsheetVersion.EXCEL97.getLastRowIndex(),ClientPersonConstants. CLIENT_TYPE_COL,ClientPersonConstants. CLIENT_TYPE_COL);
        CellRangeAddressList dobRange=new CellRangeAddressList(1,
                SpreadsheetVersion.EXCEL97.getLastRowIndex(),ClientPersonConstants. DOB_COL,ClientPersonConstants. DOB_COL);
        CellRangeAddressList isStaffRange=new CellRangeAddressList(1,
                SpreadsheetVersion.EXCEL97.getLastRowIndex(),ClientPersonConstants. IS_STAFF_COL,ClientPersonConstants. IS_STAFF_COL);
        CellRangeAddressList genderRange=new CellRangeAddressList(1,
                SpreadsheetVersion.EXCEL97.getLastRowIndex(), ClientPersonConstants.GENDER_COL,ClientPersonConstants. GENDER_COL);
        CellRangeAddressList clientClassificationRange=new CellRangeAddressList(1,
                SpreadsheetVersion.EXCEL97.getLastRowIndex(), ClientPersonConstants.CLIENT_CLASSIFICATION_COL, ClientPersonConstants.CLIENT_CLASSIFICATION_COL);
        CellRangeAddressList enabledAddressRange=new CellRangeAddressList(1,
                SpreadsheetVersion.EXCEL97.getLastRowIndex(), ClientPersonConstants.ADDRESS_ENABLED_COL, ClientPersonConstants.ADDRESS_ENABLED_COL);
        CellRangeAddressList addressTypeRange=new CellRangeAddressList(1,
                SpreadsheetVersion.EXCEL97.getLastRowIndex(),ClientPersonConstants. ADDRESS_TYPE_COL, ClientPersonConstants.ADDRESS_TYPE_COL);
        CellRangeAddressList stateProvinceRange=new CellRangeAddressList(1,
                SpreadsheetVersion.EXCEL97.getLastRowIndex(),ClientPersonConstants. STATE_PROVINCE_COL, ClientPersonConstants.STATE_PROVINCE_COL);
        CellRangeAddressList countryRange=new CellRangeAddressList(1,
                SpreadsheetVersion.EXCEL97.getLastRowIndex(), ClientPersonConstants.COUNTRY_COL, ClientPersonConstants.COUNTRY_COL);
        CellRangeAddressList residenceTypeRange= new CellRangeAddressList(1,
                SpreadsheetVersion.EXCEL97.getLastRowIndex(),ClientPersonConstants.RESIDENCE_TYPE_COL, ClientPersonConstants.RESIDENCE_TYPE_COL);
        CellRangeAddressList activeAddressRange=new CellRangeAddressList(1,
                SpreadsheetVersion.EXCEL97.getLastRowIndex(),ClientPersonConstants. IS_ACTIVE_ADDRESS_COL,ClientPersonConstants. IS_ACTIVE_ADDRESS_COL);
        DataValidationHelper validationHelper = new HSSFDataValidationHelper((HSSFSheet) worksheet);
        List<OfficeData> offices = officeSheetPopulator.getOffices();
        // Register the named ranges ("Office", "Gender", ...) used by the formulas below.
        setNames(worksheet, offices);
        // Constraints: named-range lists, True/False lists, and date bounds.
        // Staff list depends on the chosen office in column D; activation date must
        // fall between the office opening date (VLOOKUP into AJ:AK) and today.
        DataValidationConstraint officeNameConstraint =
                validationHelper.createFormulaListConstraint("Office");
        DataValidationConstraint staffNameConstraint =
                validationHelper.createFormulaListConstraint("INDIRECT(CONCATENATE(\"Staff_\",$D1))");
        DataValidationConstraint submittedOnDateConstraint =
                validationHelper.createDateConstraint(DataValidationConstraint.OperatorType.LESS_OR_EQUAL,
                        "=$I1" ,null,dateformat);
        DataValidationConstraint activationDateConstraint =
                validationHelper.createDateConstraint(DataValidationConstraint.OperatorType.BETWEEN,
                        "=VLOOKUP($D1,$AJ$2:$AK" + (offices.size() + 1) + ",2,FALSE)", "=TODAY()", dateformat);
        DataValidationConstraint dobDateConstraint =
                validationHelper.createDateConstraint(DataValidationConstraint.OperatorType.LESS_OR_EQUAL,
                        "=TODAY()",null, dateformat);
        DataValidationConstraint activeConstraint =
                validationHelper.createExplicitListConstraint(new String[] {"True", "False"});
        DataValidationConstraint clientTypesConstraint =
                validationHelper.createFormulaListConstraint("ClientTypes");
        DataValidationConstraint isStaffConstraint =
                validationHelper.createExplicitListConstraint(new String[] {"True", "False"});
        DataValidationConstraint genderConstraint =
                validationHelper.createFormulaListConstraint("Gender");
        DataValidationConstraint clientClassificationConstraint =
                validationHelper.createFormulaListConstraint("ClientClassification");
        DataValidationConstraint enabledAddressConstraint =
                validationHelper.createExplicitListConstraint(new String[] {"True", "False"});
        DataValidationConstraint addressTypeConstraint =
                validationHelper.createFormulaListConstraint("AddressType");
        DataValidationConstraint stateProvinceConstraint =
                validationHelper.createFormulaListConstraint("StateProvince");
        DataValidationConstraint countryConstraint =
                validationHelper.createFormulaListConstraint("Country");
        DataValidationConstraint residenceTypeConstraint =
                validationHelper.createFormulaListConstraint("ResidenceType");
        DataValidationConstraint activeAddressConstraint =
                validationHelper.createExplicitListConstraint(new String[] {"True", "False"});
        // Bind each constraint to its range and attach everything to the sheet.
        DataValidation officeValidation =
                validationHelper.createValidation(officeNameConstraint, officeNameRange);
        DataValidation staffValidation =
                validationHelper.createValidation(staffNameConstraint, staffNameRange);
        DataValidation submittedOnDateValidation =
                validationHelper.createValidation(submittedOnDateConstraint, submittedOnDateRange);
        DataValidation activationDateValidation =
                validationHelper.createValidation(activationDateConstraint, activationDateRange);
        DataValidation dobDateValidation =
                validationHelper.createValidation(dobDateConstraint, dobRange);
        DataValidation activeValidation =
                validationHelper.createValidation(activeConstraint, activeRange);
        DataValidation clientTypeValidation =
                validationHelper.createValidation(clientTypesConstraint, clientTypeRange);
        DataValidation isStaffValidation =
                validationHelper.createValidation(isStaffConstraint, isStaffRange);
        DataValidation genderValidation =
                validationHelper.createValidation(genderConstraint, genderRange);
        DataValidation clientClassificationValidation =
                validationHelper.createValidation(clientClassificationConstraint, clientClassificationRange);
        DataValidation enabledAddressValidation=
                validationHelper.createValidation(enabledAddressConstraint,enabledAddressRange);
        DataValidation addressTypeValidation =
                validationHelper.createValidation(addressTypeConstraint, addressTypeRange);
        DataValidation stateProvinceValidation =
                validationHelper.createValidation(stateProvinceConstraint, stateProvinceRange);
        DataValidation countryValidation =
                validationHelper.createValidation(countryConstraint, countryRange);
        DataValidation residenceTypeValidation =
                validationHelper.createValidation(residenceTypeConstraint, residenceTypeRange);
        DataValidation activeAddressValidation =
                validationHelper.createValidation(activeAddressConstraint,activeAddressRange);
        worksheet.addValidationData(activeValidation);
        worksheet.addValidationData(officeValidation);
        worksheet.addValidationData(staffValidation);
        worksheet.addValidationData(activationDateValidation);
        worksheet.addValidationData(submittedOnDateValidation);
        worksheet.addValidationData(dobDateValidation);
        worksheet.addValidationData(clientTypeValidation);
        worksheet.addValidationData(isStaffValidation);
        worksheet.addValidationData(genderValidation);
        worksheet.addValidationData(clientClassificationValidation);
        worksheet.addValidationData(enabledAddressValidation);
        worksheet.addValidationData(addressTypeValidation);
        worksheet.addValidationData(stateProvinceValidation);
        worksheet.addValidationData(countryValidation);
        worksheet.addValidationData(residenceTypeValidation);
        worksheet.addValidationData(activeAddressValidation);
    }
private void setNames(Sheet worksheet, List<OfficeData> offices) {
Workbook clientWorkbook = worksheet.getWorkbook();
Name officeGroup = clientWorkbook.createName();
officeGroup.setNameName("Office");
officeGroup.setRefersToFormula(TemplatePopulateImportConstants.OFFICE_SHEET_NAME+"!$B$2:$B$" + (offices.size() + 1));
Name clientTypeGroup = clientWorkbook.createName();
clientTypeGroup.setNameName("ClientTypes");
clientTypeGroup.setRefersToFormula(TemplatePopulateImportConstants.CLIENT_PERSON_SHEET_NAME+"!$AN$2:$AN$" +
(clientTypeCodeValues.size() + 1));
Name genderGroup = clientWorkbook.createName();
genderGroup.setNameName("Gender");
genderGroup.setRefersToFormula(TemplatePopulateImportConstants.CLIENT_PERSON_SHEET_NAME+"!$AL$2:$AL$" + (genderCodeValues.size() + 1));
Name clientClassficationGroup = clientWorkbook.createName();
clientClassficationGroup.setNameName("ClientClassification");
clientClassficationGroup.setRefersToFormula(TemplatePopulateImportConstants.CLIENT_PERSON_SHEET_NAME+"!$AM$2:$AM$" +
(clientClassificationCodeValues.size() + 1));
Name addressTypeGroup = clientWorkbook.createName();
addressTypeGroup.setNameName("AddressType");
addressTypeGroup.setRefersToFormula(TemplatePopulateImportConstants.CLIENT_PERSON_SHEET_NAME+"!$AO$2:$AO$" +
(addressTypesCodeValues.size() + 1));
Name stateProvinceGroup = clientWorkbook.createName();
stateProvinceGroup.setNameName("StateProvince");
stateProvinceGroup.setRefersToFormula(TemplatePopulateImportConstants.CLIENT_PERSON_SHEET_NAME+"!$AP$2:$AP$" +
(stateProvinceCodeValues.size() + 1));
Name countryGroup = clientWorkbook.createName();
countryGroup.setNameName("Country");
countryGroup.setRefersToFormula(TemplatePopulateImportConstants.CLIENT_PERSON_SHEET_NAME+"!$AQ$2:$AQ$" +
(countryCodeValues.size() + 1));
Name residenceTypeGroup = clientWorkbook.createName();
residenceTypeGroup.setNameName("ResidenceType");
residenceTypeGroup.setRefersToFormula(TemplatePopulateImportConstants.CLIENT_PERSON_SHEET_NAME+"!$AR$2:$AR$" +
(residenceCodeValues.size() + 1));
for (Integer i = 0; i < offices.size(); i++) {
Integer[] officeNameToBeginEndIndexesOfStaff =
personnelSheetPopulator.getOfficeNameToBeginEndIndexesOfStaff().get(i);
if (officeNameToBeginEndIndexesOfStaff != null) {
Name name = clientWorkbook.createName();
name.setNameName("Staff_" + offices.get(i).name().trim().replaceAll("[ )(]", "_"));
name.setRefersToFormula(TemplatePopulateImportConstants.STAFF_SHEET_NAME+"!$B$" +
officeNameToBeginEndIndexesOfStaff[0] + ":$B$" + officeNameToBeginEndIndexesOfStaff[1]);
}
}
}
}
| 65.753117
| 156
| 0.80669
|
901ab4434332359bb56e89aad37f2255a0e6b2bd
| 4,497
|
package com.yooksi.fierysouls.block;
import javax.annotation.Nullable;
import com.yooksi.fierysouls.entity.item.EntityItemTorch;
import com.yooksi.fierysouls.item.ItemTorch;
import com.yooksi.fierysouls.tileentity.TileEntityTorch;
import net.minecraft.block.Block;
import net.minecraft.block.state.IBlockState;
import net.minecraft.entity.item.EntityItem;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.stats.StatList;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.World;
public abstract class BlockTorch extends net.minecraft.block.BlockTorch
{
	/**
	 * Drop the block as item when harvested by player. <br>
	 * Called when the player destroys the block by left-clicking it in survival mode. <p>
	 *
	 * <i>Note: This method is not triggered in creative mode.</i>
	 *
	 * @param worldIn The world instance where the player is harvesting the block
	 * @param player EntityPlayer harvesting the block
	 * @param pos Position of the block that's being harvested
	 * @param state Current state of the block
	 * @param te TileEntity that belongs to this block
	 * @param stack ItemStack the player harvested the block with (may be null)
	 */
	@Override
	public void harvestBlock(World worldIn, EntityPlayer player, BlockPos pos, IBlockState state, @Nullable TileEntity te, @Nullable ItemStack stack)
	{
		player.addStat(StatList.getBlockStats(this));
		player.addExhaustion(0.025F);

		// 'instanceof' already rejects null, so no separate null check is needed.
		if (te instanceof TileEntityTorch)
		{
			TileEntityTorch torchEntity = (TileEntityTorch) te;
			ItemStack itemstack = this.createStackedBlock(state);

			if (itemstack != null)
			{
				// Pass the custom NBT from the tile entity to the new ItemStack
				// so the dropped item keeps the torch's state.
				if (ItemTorch.isItemTorch(itemstack.getItem(), false))
				{
					ItemTorch.createCustomItemNBTFromExisting(itemstack, worldIn, torchEntity.saveDataToPacket());
				}
				spawnAsEntity(worldIn, pos, itemstack);
			}
		}
	}

	/**
	 * Will be called when a block beneath the block is broken and the item is forced to drop. <br>
	 * For regular drops we should use {@link #harvestBlock}.
	 *
	 * @param worldIn The world instance where the block is dropped
	 * @param pos Position of the block being dropped
	 * @param state Current state of the block
	 * @param chance Per-item probability in [0, 1] that the item actually drops
	 * @param fortune Fortune level applied to the drops
	 */
	@Override
	public void dropBlockAsItemWithChance(World worldIn, BlockPos pos, IBlockState state, float chance, int fortune)
	{
		TileEntityTorch teTorch = (TileEntityTorch) TileEntityTorch.findTorchTileEntity(worldIn, pos, true);

		if (!worldIn.restoringBlockSnapshots) // do not drop items while restoring blockstates, prevents item dupe
		{
			java.util.List<ItemStack> items = getDrops(worldIn, pos, state, fortune);
			chance = net.minecraftforge.event.ForgeEventFactory.fireBlockHarvesting(items, worldIn, pos, state, fortune, chance, false, harvesters.get());

			for (ItemStack item : items)
			{
				// Torch items carry the tile entity's custom NBT and spawn as a
				// custom torch entity; everything else takes the vanilla path.
				if (ItemTorch.isItemTorch(item.getItem(), false) && teTorch != null)
				{
					ItemTorch.createCustomItemNBTFromExisting(item, worldIn, teTorch.saveDataToPacket());

					if (worldIn.rand.nextFloat() <= chance)
						spawnAsTorchEntity(worldIn, pos, item);
				}
				else if (worldIn.rand.nextFloat() <= chance)
					spawnAsEntity(worldIn, pos, item);
			}
		}
	}

	/**
	 * A slightly modified version of {@link Block#spawnAsEntity} designed to create <br>
	 * and spawn our own custom torch entity.
	 *
	 * @param worldIn World to spawn the entity in
	 * @param pos Block position around which the entity appears
	 * @param stack ItemStack carried by the spawned entity
	 */
	private void spawnAsTorchEntity(World worldIn, BlockPos pos, ItemStack stack)
	{
		if (!worldIn.isRemote && worldIn.getGameRules().getBoolean("doTileDrops") && !worldIn.restoringBlockSnapshots) // do not drop items while restoring blockstates, prevents item dupe
		{
			if (!captureDrops.get())
			{
				// Random offset inside the block space, matching vanilla drop behaviour.
				double d0 = (double)(worldIn.rand.nextFloat() * 0.5F) + 0.25D;
				double d1 = (double)(worldIn.rand.nextFloat() * 0.5F) + 0.25D;
				double d2 = (double)(worldIn.rand.nextFloat() * 0.5F) + 0.25D;

				EntityItem entityitem = new EntityItemTorch(worldIn, (double)pos.getX() + d0, (double)pos.getY() + d1, (double)pos.getZ() + d2, stack);
				entityitem.setDefaultPickupDelay();
				worldIn.spawnEntityInWorld(entityitem);
			}
			else capturedDrops.get().add(stack);
		}
	}
}
| 41.256881
| 181
| 0.688459
|
8ed4d2dc13edb3e4eff8ace13bf0a88325e53ad1
| 849
|
package org.pipecraft.infra.sets;
import java.util.Random;
/**
 * A random sampler over a stream of known size, implementing selection sampling
 * (Knuth's Algorithm S): each visited item is accepted with probability
 * (items still to sample) / (items still to visit). This guarantees that after all
 * {@code streamSize} items have been visited, exactly {@code toSample} of them were
 * accepted, and every subset of that size is equally likely.
 *
 * <p>Not thread-safe; {@link #accept()} must be called exactly once per item,
 * in stream order.
 *
 * @author Eyal Schneider
 */
public class StreamSampler {
  private final int size;   // Total size of the stream
  private int toSample;     // Number of items still to be sampled
  private int visited;      // Number of items visited so far
  private final Random rnd; // Randomness source

  /**
   * Constructor
   *
   * @param rnd The source of randomness to use
   * @param streamSize The total size of the stream
   * @param toSample The number of items to be sampled, expected in the range [0, streamSize]
   */
  public StreamSampler(Random rnd, int streamSize, int toSample) {
    this.size = streamSize;
    this.toSample = toSample;
    this.rnd = rnd;
  }

  /**
   * To be called once per visited item, in stream order.
   *
   * @return true if the current item is selected into the sample, false otherwise
   */
  public boolean accept() {
    boolean accepted = false;
    // Accept with probability (items still needed) / (items still available).
    if (rnd.nextDouble() < ((double)toSample) / (size - visited)){
      toSample--;
      accepted = true;
    }
    visited++;
    return accepted;
  }
}
| 20.214286
| 66
| 0.636042
|
2bc3310a6aab7440bb9ea33a7d12b1e8587fbfd8
| 1,825
|
/*
* IzPack - Copyright 2001-2012 Julien Ponge, All Rights Reserved.
*
* http://izpack.org/
* http://izpack.codehaus.org/
*
* Copyright 2012 Tim Anderson
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.izforge.izpack.panels.userinput.field.radio;
import com.izforge.izpack.panels.userinput.field.Choice;
/**
 * A single choice of a radio button field.
 *
 * @author Tim Anderson
 */
public class RadioChoice extends Choice
{
    /**
     * Whether selecting this choice forces the panel to re-validate.
     */
    private final boolean revalidate;

    /**
     * Creates a new radio button choice.
     *
     * @param key        the choice key
     * @param value      the value displayed to the user
     * @param revalidate {@code true} if selecting this choice triggers re-validation
     */
    public RadioChoice(String key, String value, boolean revalidate)
    {
        super(key, value);
        this.revalidate = revalidate;
    }

    /**
     * Returns the value representing selection of this choice, which is its key.
     *
     * @return the choice key
     */
    public String getTrueValue()
    {
        return getKey();
    }

    /**
     * Indicates whether selecting this choice triggers re-validation.
     *
     * @return {@code true} if re-validation is triggered on selection
     */
    public boolean getRevalidate()
    {
        return revalidate;
    }
}
| 25
| 81
| 0.656986
|
d205f3eb9e30433e2dfe8b5e04165862cf9fb3a7
| 9,493
|
package cucumber.runtime;
import cucumber.api.PendingException;
import cucumber.runtime.formatter.StepMatcher;
import cucumber.runtime.io.ClasspathResourceLoader;
import cucumber.runtime.io.Resource;
import cucumber.runtime.model.CucumberFeature;
import gherkin.I18n;
import gherkin.formatter.Formatter;
import gherkin.formatter.Reporter;
import gherkin.formatter.model.Step;
import gherkin.formatter.model.Tag;
import junit.framework.AssertionFailedError;
import org.junit.Ignore;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.AbstractMap.SimpleEntry;
import java.util.*;

import static java.util.Arrays.asList;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyCollectionOf;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.argThat;
import static org.mockito.Mockito.*;
/**
 * Helper for feature-level tests: parses Gherkin sources held in memory and runs
 * them against a {@link RuntimeGlue} whose steps and hooks are Mockito mocks.
 *
 * <p>Step behaviour is driven by maps from step name to a result ({@code "passed"},
 * {@code "failed"}, {@code "pending"}, {@code "skipped"} or {@code "undefined"})
 * and/or to a reported source location. Hooks are given as (type, result) entries
 * where the type is {@code "before"} or {@code "after"}.
 */
@Ignore
public class TestHelper {

    /**
     * Parses a single feature from an in-memory Gherkin source.
     *
     * @param path the path reported for the feature
     * @param source the Gherkin source text
     * @return the parsed feature
     * @throws IOException if the source cannot be parsed
     */
    public static CucumberFeature feature(final String path, final String source) throws IOException {
        ArrayList<CucumberFeature> cucumberFeatures = new ArrayList<CucumberFeature>();
        FeatureBuilder featureBuilder = new FeatureBuilder(cucumberFeatures);
        featureBuilder.parse(new Resource() {
            @Override
            public String getPath() {
                return path;
            }

            @Override
            public InputStream getInputStream() {
                // The Charset overload cannot throw UnsupportedEncodingException,
                // so no try/catch is needed (unlike getBytes("UTF-8")).
                return new ByteArrayInputStream(source.getBytes(StandardCharsets.UTF_8));
            }

            @Override
            public String getClassName() {
                throw new UnsupportedOperationException();
            }
        }, new ArrayList<Object>());
        return cucumberFeatures.get(0);
    }

    /** Runs a single feature with mocked step results and hooks. */
    public static void runFeatureWithFormatter(final CucumberFeature feature, final Map<String, String> stepsToResult, final List<SimpleEntry<String, String>> hooks,
            final long stepHookDuration, final Formatter formatter, final Reporter reporter) throws Throwable {
        runFeaturesWithFormatter(Arrays.asList(feature), stepsToResult, Collections.<String, String>emptyMap(), hooks, stepHookDuration, formatter, reporter);
    }

    /** Runs several features with mocked step results and hooks. */
    public static void runFeaturesWithFormatter(final List<CucumberFeature> features, final Map<String, String> stepsToResult,
            final List<SimpleEntry<String, String>> hooks, final long stepHookDuration, final Formatter formatter, final Reporter reporter) throws Throwable {
        runFeaturesWithFormatter(features, stepsToResult, Collections.<String, String>emptyMap(), hooks, stepHookDuration, formatter, reporter);
    }

    /** Runs a single feature whose steps only need a reported location (all pass). */
    public static void runFeatureWithFormatter(final CucumberFeature feature, final Map<String, String> stepsToLocation,
            final Formatter formatter, final Reporter reporter) throws Throwable {
        runFeaturesWithFormatter(Arrays.asList(feature), Collections.<String, String>emptyMap(), stepsToLocation,
                Collections.<SimpleEntry<String, String>>emptyList(), 0L, formatter, reporter);
    }

    private static void runFeaturesWithFormatter(final List<CucumberFeature> features, final Map<String, String> stepsToResult, final Map<String, String> stepsToLocation,
            final List<SimpleEntry<String, String>> hooks, final long stepHookDuration, final Formatter formatter, final Reporter reporter) throws Throwable {
        final RuntimeOptions runtimeOptions = new RuntimeOptions("");
        final ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
        final ClasspathResourceLoader resourceLoader = new ClasspathResourceLoader(classLoader);
        final RuntimeGlue glue = createMockedRuntimeGlueThatMatchesTheSteps(stepsToResult, stepsToLocation, hooks);
        // A stub stop watch makes every step/hook report the same fixed duration.
        final Runtime runtime = new Runtime(resourceLoader, classLoader, asList(mock(Backend.class)), runtimeOptions, new StopWatch.Stub(stepHookDuration), glue);

        for (CucumberFeature feature : features) {
            feature.run(formatter, reporter, runtime);
        }
        formatter.done();
        formatter.close();
    }

    private static RuntimeGlue createMockedRuntimeGlueThatMatchesTheSteps(Map<String, String> stepsToResult, Map<String, String> stepsToLocation,
            final List<SimpleEntry<String, String>> hooks) throws Throwable {
        RuntimeGlue glue = mock(RuntimeGlue.class);
        TestHelper.mockSteps(glue, stepsToResult, stepsToLocation);
        TestHelper.mockHooks(glue, hooks);
        return glue;
    }

    /** Registers a mocked step definition match for every known step name. */
    private static void mockSteps(RuntimeGlue glue, Map<String, String> stepsToResult, Map<String, String> stepsToLocation) throws Throwable {
        for (String stepName : mergeStepSets(stepsToResult, stepsToLocation)) {
            String stepResult = getResultWithDefaultPassed(stepsToResult, stepName);
            // "undefined" steps simply get no match registered at all.
            if (!"undefined".equals(stepResult)) {
                StepDefinitionMatch matchStep = mock(StepDefinitionMatch.class);
                when(glue.stepDefinitionMatch(anyString(), TestHelper.stepWithName(stepName), (I18n) any())).thenReturn(matchStep);
                mockStepResult(stepResult, matchStep);
                mockStepLocation(getLocationWithDefaultEmptyString(stepsToLocation, stepName), matchStep);
            }
        }
    }

    /** Makes the mocked match behave according to the requested result string. */
    private static void mockStepResult(String stepResult, StepDefinitionMatch matchStep) throws Throwable {
        if ("pending".equals(stepResult)) {
            doThrow(new PendingException()).when(matchStep).runStep((I18n) any());
        } else if ("failed".equals(stepResult)) {
            AssertionFailedError error = TestHelper.mockAssertionFailedError();
            doThrow(error).when(matchStep).runStep((I18n) any());
        } else if (!"passed".equals(stepResult) &&
                !"skipped".equals(stepResult)) {
            fail("Cannot mock step to the result: " + stepResult);
        }
    }

    private static void mockStepLocation(String stepLocation, StepDefinitionMatch matchStep) {
        when(matchStep.getLocation()).thenReturn(stepLocation);
    }

    /** Registers the mocked before/after hooks on the glue. */
    private static void mockHooks(RuntimeGlue glue, final List<SimpleEntry<String, String>> hooks) throws Throwable {
        List<HookDefinition> beforeHooks = new ArrayList<HookDefinition>();
        List<HookDefinition> afterHooks = new ArrayList<HookDefinition>();
        for (SimpleEntry<String, String> hookEntry : hooks) {
            TestHelper.mockHook(hookEntry, beforeHooks, afterHooks);
        }
        if (!beforeHooks.isEmpty()) {
            when(glue.getBeforeHooks()).thenReturn(beforeHooks);
        }
        if (!afterHooks.isEmpty()) {
            when(glue.getAfterHooks()).thenReturn(afterHooks);
        }
    }

    private static void mockHook(SimpleEntry<String, String> hookEntry, List<HookDefinition> beforeHooks,
            List<HookDefinition> afterHooks) throws Throwable {
        HookDefinition hook = mock(HookDefinition.class);
        when(hook.matches(anyCollectionOf(Tag.class))).thenReturn(true);
        // Constant-first equals is null-safe and consistent with mockStepResult().
        if ("failed".equals(hookEntry.getValue())) {
            AssertionFailedError error = TestHelper.mockAssertionFailedError();
            doThrow(error).when(hook).execute((cucumber.api.Scenario) any());
        }
        if ("before".equals(hookEntry.getKey())) {
            beforeHooks.add(hook);
        } else if ("after".equals(hookEntry.getKey())) {
            afterHooks.add(hook);
        } else {
            fail("Only before and after hooks are allowed, hook type found was: " + hookEntry.getKey());
        }
    }

    private static Step stepWithName(String name) {
        return argThat(new StepMatcher(name));
    }

    /** An AssertionFailedError whose printStackTrace writes a fixed marker string. */
    private static AssertionFailedError mockAssertionFailedError() {
        AssertionFailedError error = mock(AssertionFailedError.class);
        Answer<Object> printStackTraceHandler = new Answer<Object>() {
            @Override
            public Object answer(InvocationOnMock invocation) throws Throwable {
                PrintWriter writer = (PrintWriter) invocation.getArguments()[0];
                writer.print("the stack trace");
                return null;
            }
        };
        doAnswer(printStackTraceHandler).when(error).printStackTrace((PrintWriter) any());
        return error;
    }

    /** Convenience factory for a (hook type, result) entry. */
    public static SimpleEntry<String, String> hookEntry(String type, String result) {
        return new SimpleEntry<String, String>(type, result);
    }

    /** Union of the step names appearing in either map. */
    private static Set<String> mergeStepSets(Map<String, String> stepsToResult, Map<String, String> stepsToLocation) {
        Set<String> steps = new HashSet<String>(stepsToResult.keySet());
        steps.addAll(stepsToLocation.keySet());
        return steps;
    }

    private static String getResultWithDefaultPassed(Map<String, String> stepsToResult, String step) {
        return stepsToResult.containsKey(step) ? stepsToResult.get(step) : "passed";
    }

    private static String getLocationWithDefaultEmptyString(Map<String, String> stepsToLocation, String step) {
        return stepsToLocation.containsKey(step) ? stepsToLocation.get(step) : "";
    }
}
| 49.442708
| 195
| 0.678921
|
99eec8f5ce900b2c54fa943e5e596f1ac2d698b2
| 1,137
|
package net.redpipe.engine.resteasy;
import java.io.IOException;
import java.io.OutputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import java.nio.charset.StandardCharsets;

import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.ext.MessageBodyWriter;
import javax.ws.rs.ext.Provider;
/**
 * JAX-RS body writer that serialises a Vert.x {@link JsonObject} to its JSON
 * encoding, always using UTF-8.
 */
@Provider
public class JsonObjectBodyWriter implements MessageBodyWriter<JsonObject> {

	@Override
	public boolean isWriteable(Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) {
		// Exact class match only: subclasses of JsonObject are not handled here.
		return type == JsonObject.class;
	}

	@Override
	public long getSize(JsonObject t, Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) {
		// Content length must be measured in bytes: String.length() counts
		// UTF-16 chars and under-reports for non-ASCII payloads. (Since
		// JAX-RS 2.0 this value is ignored by the runtime anyway.)
		return t.encode().getBytes(StandardCharsets.UTF_8).length;
	}

	@Override
	public void writeTo(JsonObject t, Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType,
			MultivaluedMap<String, Object> httpHeaders, OutputStream entityStream)
			throws IOException, WebApplicationException {
		// Explicit charset keeps the byte count consistent with getSize().
		entityStream.write(t.encode().getBytes(StandardCharsets.UTF_8));
	}
}
| 29.921053
| 116
| 0.782762
|
e366f63b899c777ab5d58a2bcc90a095ff16d486
| 94,204
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.network.fluent;
import com.azure.core.annotation.BodyParam;
import com.azure.core.annotation.Delete;
import com.azure.core.annotation.ExpectedResponses;
import com.azure.core.annotation.Get;
import com.azure.core.annotation.Headers;
import com.azure.core.annotation.Host;
import com.azure.core.annotation.HostParam;
import com.azure.core.annotation.Patch;
import com.azure.core.annotation.PathParam;
import com.azure.core.annotation.Put;
import com.azure.core.annotation.QueryParam;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceInterface;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.annotation.UnexpectedResponseExceptionType;
import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.PagedResponse;
import com.azure.core.http.rest.PagedResponseBase;
import com.azure.core.http.rest.Response;
import com.azure.core.http.rest.RestProxy;
import com.azure.core.management.exception.ManagementException;
import com.azure.core.management.polling.PollResult;
import com.azure.core.util.Context;
import com.azure.core.util.FluxUtil;
import com.azure.core.util.logging.ClientLogger;
import com.azure.core.util.polling.PollerFlux;
import com.azure.core.util.polling.SyncPoller;
import com.azure.resourcemanager.network.fluent.inner.IpAddressAvailabilityResultInner;
import com.azure.resourcemanager.network.fluent.inner.VirtualNetworkInner;
import com.azure.resourcemanager.network.fluent.inner.VirtualNetworkListResultInner;
import com.azure.resourcemanager.network.fluent.inner.VirtualNetworkListUsageResultInner;
import com.azure.resourcemanager.network.fluent.inner.VirtualNetworkUsageInner;
import com.azure.resourcemanager.network.models.TagsObject;
import com.azure.resourcemanager.resources.fluentcore.collection.InnerSupportsDelete;
import com.azure.resourcemanager.resources.fluentcore.collection.InnerSupportsGet;
import com.azure.resourcemanager.resources.fluentcore.collection.InnerSupportsListing;
import java.nio.ByteBuffer;
import java.util.Map;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
/** An instance of this class provides access to all the operations defined in VirtualNetworks. */
public final class VirtualNetworksClient
implements InnerSupportsGet<VirtualNetworkInner>,
InnerSupportsListing<VirtualNetworkInner>,
InnerSupportsDelete<Void> {
    /** Logger for client-side diagnostics of this operation class. */
    private final ClientLogger logger = new ClientLogger(VirtualNetworksClient.class);

    /** The proxy service used to perform REST calls. */
    private final VirtualNetworksService service;

    /** The service client containing this operation class. */
    private final NetworkManagementClient client;
    /**
     * Initializes an instance of VirtualNetworksClient.
     *
     * @param client the instance of the service client containing this operation class.
     */
    public VirtualNetworksClient(NetworkManagementClient client) {
        // Build a REST proxy implementing VirtualNetworksService on top of the
        // client's HTTP pipeline and serializer.
        this.service =
            RestProxy.create(VirtualNetworksService.class, client.getHttpPipeline(), client.getSerializerAdapter());
        this.client = client;
    }
    /**
     * The interface defining all the services for NetworkManagementClientVirtualNetworks to be used by the proxy
     * service to perform REST calls.
     */
    @Host("{$host}")
    @ServiceInterface(name = "NetworkManagementCli")
    private interface VirtualNetworksService {
        // Starts deletion of a virtual network (long-running: 202 while pending).
        @Headers({"Accept: application/json;q=0.9", "Content-Type: application/json"})
        @Delete(
            "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network"
                + "/virtualNetworks/{virtualNetworkName}")
        @ExpectedResponses({200, 202, 204})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<Flux<ByteBuffer>>> delete(
            @HostParam("$host") String endpoint,
            @PathParam("resourceGroupName") String resourceGroupName,
            @PathParam("virtualNetworkName") String virtualNetworkName,
            @QueryParam("api-version") String apiVersion,
            @PathParam("subscriptionId") String subscriptionId,
            Context context);

        // Gets a single virtual network; $expand optionally inlines referenced resources.
        @Headers({"Accept: application/json", "Content-Type: application/json"})
        @Get(
            "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network"
                + "/virtualNetworks/{virtualNetworkName}")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<VirtualNetworkInner>> getByResourceGroup(
            @HostParam("$host") String endpoint,
            @PathParam("resourceGroupName") String resourceGroupName,
            @PathParam("virtualNetworkName") String virtualNetworkName,
            @QueryParam("api-version") String apiVersion,
            @PathParam("subscriptionId") String subscriptionId,
            @QueryParam("$expand") String expand,
            Context context);

        // Creates or updates a virtual network (long-running: 201 on create).
        @Headers({"Accept: application/json", "Content-Type: application/json"})
        @Put(
            "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network"
                + "/virtualNetworks/{virtualNetworkName}")
        @ExpectedResponses({200, 201})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<Flux<ByteBuffer>>> createOrUpdate(
            @HostParam("$host") String endpoint,
            @PathParam("resourceGroupName") String resourceGroupName,
            @PathParam("virtualNetworkName") String virtualNetworkName,
            @QueryParam("api-version") String apiVersion,
            @PathParam("subscriptionId") String subscriptionId,
            @BodyParam("application/json") VirtualNetworkInner parameters,
            Context context);

        // Updates only the tags of an existing virtual network.
        @Headers({"Accept: application/json", "Content-Type: application/json"})
        @Patch(
            "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network"
                + "/virtualNetworks/{virtualNetworkName}")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<VirtualNetworkInner>> updateTags(
            @HostParam("$host") String endpoint,
            @PathParam("resourceGroupName") String resourceGroupName,
            @PathParam("virtualNetworkName") String virtualNetworkName,
            @QueryParam("api-version") String apiVersion,
            @PathParam("subscriptionId") String subscriptionId,
            @BodyParam("application/json") TagsObject parameters,
            Context context);

        // Lists all virtual networks in the subscription (first page).
        @Headers({"Accept: application/json", "Content-Type: application/json"})
        @Get("/subscriptions/{subscriptionId}/providers/Microsoft.Network/virtualNetworks")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<VirtualNetworkListResultInner>> list(
            @HostParam("$host") String endpoint,
            @QueryParam("api-version") String apiVersion,
            @PathParam("subscriptionId") String subscriptionId,
            Context context);

        // Lists all virtual networks in a resource group (first page).
        @Headers({"Accept: application/json", "Content-Type: application/json"})
        @Get(
            "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network"
                + "/virtualNetworks")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<VirtualNetworkListResultInner>> listByResourceGroup(
            @HostParam("$host") String endpoint,
            @PathParam("resourceGroupName") String resourceGroupName,
            @QueryParam("api-version") String apiVersion,
            @PathParam("subscriptionId") String subscriptionId,
            Context context);

        // Checks whether the given private IP address is available in the network.
        @Headers({"Accept: application/json", "Content-Type: application/json"})
        @Get(
            "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network"
                + "/virtualNetworks/{virtualNetworkName}/CheckIPAddressAvailability")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<IpAddressAvailabilityResultInner>> checkIpAddressAvailability(
            @HostParam("$host") String endpoint,
            @PathParam("resourceGroupName") String resourceGroupName,
            @PathParam("virtualNetworkName") String virtualNetworkName,
            @QueryParam("ipAddress") String ipAddress,
            @QueryParam("api-version") String apiVersion,
            @PathParam("subscriptionId") String subscriptionId,
            Context context);

        // Lists usage statistics for a virtual network (first page).
        @Headers({"Accept: application/json", "Content-Type: application/json"})
        @Get(
            "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network"
                + "/virtualNetworks/{virtualNetworkName}/usages")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<VirtualNetworkListUsageResultInner>> listUsage(
            @HostParam("$host") String endpoint,
            @PathParam("resourceGroupName") String resourceGroupName,
            @PathParam("virtualNetworkName") String virtualNetworkName,
            @QueryParam("api-version") String apiVersion,
            @PathParam("subscriptionId") String subscriptionId,
            Context context);

        // Pagination continuations: fetch the next page via the opaque nextLink URL.
        @Headers({"Accept: application/json", "Content-Type: application/json"})
        @Get("{nextLink}")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<VirtualNetworkListResultInner>> listAllNext(
            @PathParam(value = "nextLink", encoded = true) String nextLink, Context context);

        @Headers({"Accept: application/json", "Content-Type: application/json"})
        @Get("{nextLink}")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<VirtualNetworkListResultInner>> listNext(
            @PathParam(value = "nextLink", encoded = true) String nextLink, Context context);

        @Headers({"Accept: application/json", "Content-Type: application/json"})
        @Get("{nextLink}")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<VirtualNetworkListUsageResultInner>> listUsageNext(
            @PathParam(value = "nextLink", encoded = true) String nextLink, Context context);
    }
    /**
     * Deletes the specified virtual network.
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualNetworkName The name of the virtual network.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the completion.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<Flux<ByteBuffer>>> deleteWithResponseAsync(
        String resourceGroupName, String virtualNetworkName) {
        // Required-parameter failures are reported as Mono errors (not synchronous
        // throws) so they stay inside the reactive pipeline.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (virtualNetworkName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter virtualNetworkName is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        final String apiVersion = "2020-05-01";
        // Propagate the client's shared reactor context into the service call.
        return FluxUtil
            .withContext(
                context ->
                    service
                        .delete(
                            this.client.getEndpoint(),
                            resourceGroupName,
                            virtualNetworkName,
                            apiVersion,
                            this.client.getSubscriptionId(),
                            context))
            .subscriberContext(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext())));
    }
    /**
     * Deletes the specified virtual network.
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualNetworkName The name of the virtual network.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the completion.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<Flux<ByteBuffer>>> deleteWithResponseAsync(
        String resourceGroupName, String virtualNetworkName, Context context) {
        // Required-parameter failures are reported as Mono errors (not synchronous
        // throws) so they stay inside the reactive pipeline.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (virtualNetworkName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter virtualNetworkName is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        final String apiVersion = "2020-05-01";
        // Merge the caller-supplied context with the client's default context.
        context = this.client.mergeContext(context);
        return service
            .delete(
                this.client.getEndpoint(),
                resourceGroupName,
                virtualNetworkName,
                apiVersion,
                this.client.getSubscriptionId(),
                context);
    }
    /**
     * Deletes the specified virtual network.
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualNetworkName The name of the virtual network.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the completion.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public PollerFlux<PollResult<Void>, Void> beginDeleteAsync(String resourceGroupName, String virtualNetworkName) {
        Mono<Response<Flux<ByteBuffer>>> mono = deleteWithResponseAsync(resourceGroupName, virtualNetworkName);
        // Wrap the initial response in a long-running-operation poller; delete
        // returns no body, hence Void for both the poll and final result types.
        return this
            .client
            .<Void, Void>getLroResult(mono, this.client.getHttpPipeline(), Void.class, Void.class, Context.NONE);
    }
/**
* Deletes the specified virtual network.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public PollerFlux<PollResult<Void>, Void> beginDeleteAsync(
String resourceGroupName, String virtualNetworkName, Context context) {
context = this.client.mergeContext(context);
Mono<Response<Flux<ByteBuffer>>> mono = deleteWithResponseAsync(resourceGroupName, virtualNetworkName, context);
return this
.client
.<Void, Void>getLroResult(mono, this.client.getHttpPipeline(), Void.class, Void.class, context);
}
/**
* Deletes the specified virtual network.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public SyncPoller<PollResult<Void>, Void> beginDelete(String resourceGroupName, String virtualNetworkName) {
return beginDeleteAsync(resourceGroupName, virtualNetworkName).getSyncPoller();
}
/**
* Deletes the specified virtual network.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public SyncPoller<PollResult<Void>, Void> beginDelete(
String resourceGroupName, String virtualNetworkName, Context context) {
return beginDeleteAsync(resourceGroupName, virtualNetworkName, context).getSyncPoller();
}
/**
* Deletes the specified virtual network.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Void> deleteAsync(String resourceGroupName, String virtualNetworkName) {
return beginDeleteAsync(resourceGroupName, virtualNetworkName)
.last()
.flatMap(this.client::getLroFinalResultOrError);
}
/**
* Deletes the specified virtual network.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Void> deleteAsync(String resourceGroupName, String virtualNetworkName, Context context) {
return beginDeleteAsync(resourceGroupName, virtualNetworkName, context)
.last()
.flatMap(this.client::getLroFinalResultOrError);
}
/**
* Deletes the specified virtual network.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public void delete(String resourceGroupName, String virtualNetworkName) {
deleteAsync(resourceGroupName, virtualNetworkName).block();
}
/**
* Deletes the specified virtual network.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public void delete(String resourceGroupName, String virtualNetworkName, Context context) {
deleteAsync(resourceGroupName, virtualNetworkName, context).block();
}
    /**
     * Gets the specified virtual network by resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualNetworkName The name of the virtual network.
     * @param expand Expands referenced resources.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the specified virtual network by resource group.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<VirtualNetworkInner>> getByResourceGroupWithResponseAsync(
        String resourceGroupName, String virtualNetworkName, String expand) {
        // Validate required parameters up front; failures are surfaced through the returned
        // Mono (via Mono.error) rather than thrown synchronously.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (virtualNetworkName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter virtualNetworkName is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        // Service API version pinned by the code generator for this client.
        final String apiVersion = "2020-05-01";
        // Issue the request inside the subscriber's reactor context, after folding in the
        // client's shared context via subscriberContext.
        return FluxUtil
            .withContext(
                context ->
                    service
                        .getByResourceGroup(
                            this.client.getEndpoint(),
                            resourceGroupName,
                            virtualNetworkName,
                            apiVersion,
                            this.client.getSubscriptionId(),
                            expand,
                            context))
            .subscriberContext(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext())));
    }
    /**
     * Gets the specified virtual network by resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualNetworkName The name of the virtual network.
     * @param expand Expands referenced resources.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the specified virtual network by resource group.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<VirtualNetworkInner>> getByResourceGroupWithResponseAsync(
        String resourceGroupName, String virtualNetworkName, String expand, Context context) {
        // Validate required parameters up front; failures are surfaced through the returned
        // Mono (via Mono.error) rather than thrown synchronously.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (virtualNetworkName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter virtualNetworkName is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        // Service API version pinned by the code generator for this client.
        final String apiVersion = "2020-05-01";
        // Merge the caller-supplied context with the client's default context before the call.
        context = this.client.mergeContext(context);
        return service
            .getByResourceGroup(
                this.client.getEndpoint(),
                resourceGroupName,
                virtualNetworkName,
                apiVersion,
                this.client.getSubscriptionId(),
                expand,
                context);
    }
/**
* Gets the specified virtual network by resource group.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param expand Expands referenced resources.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the specified virtual network by resource group.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<VirtualNetworkInner> getByResourceGroupAsync(
String resourceGroupName, String virtualNetworkName, String expand) {
return getByResourceGroupWithResponseAsync(resourceGroupName, virtualNetworkName, expand)
.flatMap(
(Response<VirtualNetworkInner> res) -> {
if (res.getValue() != null) {
return Mono.just(res.getValue());
} else {
return Mono.empty();
}
});
}
/**
* Gets the specified virtual network by resource group.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param expand Expands referenced resources.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the specified virtual network by resource group.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<VirtualNetworkInner> getByResourceGroupAsync(
String resourceGroupName, String virtualNetworkName, String expand, Context context) {
return getByResourceGroupWithResponseAsync(resourceGroupName, virtualNetworkName, expand, context)
.flatMap(
(Response<VirtualNetworkInner> res) -> {
if (res.getValue() != null) {
return Mono.just(res.getValue());
} else {
return Mono.empty();
}
});
}
/**
* Gets the specified virtual network by resource group.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the specified virtual network by resource group.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<VirtualNetworkInner> getByResourceGroupAsync(String resourceGroupName, String virtualNetworkName) {
final String expand = null;
final Context context = null;
return getByResourceGroupWithResponseAsync(resourceGroupName, virtualNetworkName, expand)
.flatMap(
(Response<VirtualNetworkInner> res) -> {
if (res.getValue() != null) {
return Mono.just(res.getValue());
} else {
return Mono.empty();
}
});
}
/**
* Gets the specified virtual network by resource group.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param expand Expands referenced resources.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the specified virtual network by resource group.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public VirtualNetworkInner getByResourceGroup(String resourceGroupName, String virtualNetworkName, String expand) {
return getByResourceGroupAsync(resourceGroupName, virtualNetworkName, expand).block();
}
/**
* Gets the specified virtual network by resource group.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param expand Expands referenced resources.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the specified virtual network by resource group.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public VirtualNetworkInner getByResourceGroup(
String resourceGroupName, String virtualNetworkName, String expand, Context context) {
return getByResourceGroupAsync(resourceGroupName, virtualNetworkName, expand, context).block();
}
/**
* Gets the specified virtual network by resource group.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the specified virtual network by resource group.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public VirtualNetworkInner getByResourceGroup(String resourceGroupName, String virtualNetworkName) {
final String expand = null;
final Context context = null;
return getByResourceGroupAsync(resourceGroupName, virtualNetworkName, expand).block();
}
    /**
     * Creates or updates a virtual network in the specified resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualNetworkName The name of the virtual network.
     * @param parameters Virtual Network resource.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return virtual Network resource.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<Flux<ByteBuffer>>> createOrUpdateWithResponseAsync(
        String resourceGroupName, String virtualNetworkName, VirtualNetworkInner parameters) {
        // Validate required parameters up front; failures are surfaced through the returned
        // Mono (via Mono.error) rather than thrown synchronously.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (virtualNetworkName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter virtualNetworkName is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (parameters == null) {
            return Mono.error(new IllegalArgumentException("Parameter parameters is required and cannot be null."));
        } else {
            // Deep-validate the request payload before sending it over the wire.
            parameters.validate();
        }
        // Service API version pinned by the code generator for this client.
        final String apiVersion = "2020-05-01";
        // Issue the request inside the subscriber's reactor context, after folding in the
        // client's shared context via subscriberContext.
        return FluxUtil
            .withContext(
                context ->
                    service
                        .createOrUpdate(
                            this.client.getEndpoint(),
                            resourceGroupName,
                            virtualNetworkName,
                            apiVersion,
                            this.client.getSubscriptionId(),
                            parameters,
                            context))
            .subscriberContext(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext())));
    }
    /**
     * Creates or updates a virtual network in the specified resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualNetworkName The name of the virtual network.
     * @param parameters Virtual Network resource.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return virtual Network resource.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<Flux<ByteBuffer>>> createOrUpdateWithResponseAsync(
        String resourceGroupName, String virtualNetworkName, VirtualNetworkInner parameters, Context context) {
        // Validate required parameters up front; failures are surfaced through the returned
        // Mono (via Mono.error) rather than thrown synchronously.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (virtualNetworkName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter virtualNetworkName is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (parameters == null) {
            return Mono.error(new IllegalArgumentException("Parameter parameters is required and cannot be null."));
        } else {
            // Deep-validate the request payload before sending it over the wire.
            parameters.validate();
        }
        // Service API version pinned by the code generator for this client.
        final String apiVersion = "2020-05-01";
        // Merge the caller-supplied context with the client's default context before the call.
        context = this.client.mergeContext(context);
        return service
            .createOrUpdate(
                this.client.getEndpoint(),
                resourceGroupName,
                virtualNetworkName,
                apiVersion,
                this.client.getSubscriptionId(),
                parameters,
                context);
    }
/**
* Creates or updates a virtual network in the specified resource group.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param parameters Virtual Network resource.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return virtual Network resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public PollerFlux<PollResult<VirtualNetworkInner>, VirtualNetworkInner> beginCreateOrUpdateAsync(
String resourceGroupName, String virtualNetworkName, VirtualNetworkInner parameters) {
Mono<Response<Flux<ByteBuffer>>> mono =
createOrUpdateWithResponseAsync(resourceGroupName, virtualNetworkName, parameters);
return this
.client
.<VirtualNetworkInner, VirtualNetworkInner>getLroResult(
mono,
this.client.getHttpPipeline(),
VirtualNetworkInner.class,
VirtualNetworkInner.class,
Context.NONE);
}
/**
* Creates or updates a virtual network in the specified resource group.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param parameters Virtual Network resource.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return virtual Network resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public PollerFlux<PollResult<VirtualNetworkInner>, VirtualNetworkInner> beginCreateOrUpdateAsync(
String resourceGroupName, String virtualNetworkName, VirtualNetworkInner parameters, Context context) {
context = this.client.mergeContext(context);
Mono<Response<Flux<ByteBuffer>>> mono =
createOrUpdateWithResponseAsync(resourceGroupName, virtualNetworkName, parameters, context);
return this
.client
.<VirtualNetworkInner, VirtualNetworkInner>getLroResult(
mono, this.client.getHttpPipeline(), VirtualNetworkInner.class, VirtualNetworkInner.class, context);
}
/**
* Creates or updates a virtual network in the specified resource group.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param parameters Virtual Network resource.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return virtual Network resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public SyncPoller<PollResult<VirtualNetworkInner>, VirtualNetworkInner> beginCreateOrUpdate(
String resourceGroupName, String virtualNetworkName, VirtualNetworkInner parameters) {
return beginCreateOrUpdateAsync(resourceGroupName, virtualNetworkName, parameters).getSyncPoller();
}
/**
* Creates or updates a virtual network in the specified resource group.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param parameters Virtual Network resource.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return virtual Network resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public SyncPoller<PollResult<VirtualNetworkInner>, VirtualNetworkInner> beginCreateOrUpdate(
String resourceGroupName, String virtualNetworkName, VirtualNetworkInner parameters, Context context) {
return beginCreateOrUpdateAsync(resourceGroupName, virtualNetworkName, parameters, context).getSyncPoller();
}
/**
* Creates or updates a virtual network in the specified resource group.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param parameters Virtual Network resource.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return virtual Network resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<VirtualNetworkInner> createOrUpdateAsync(
String resourceGroupName, String virtualNetworkName, VirtualNetworkInner parameters) {
return beginCreateOrUpdateAsync(resourceGroupName, virtualNetworkName, parameters)
.last()
.flatMap(this.client::getLroFinalResultOrError);
}
/**
* Creates or updates a virtual network in the specified resource group.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param parameters Virtual Network resource.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return virtual Network resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<VirtualNetworkInner> createOrUpdateAsync(
String resourceGroupName, String virtualNetworkName, VirtualNetworkInner parameters, Context context) {
return beginCreateOrUpdateAsync(resourceGroupName, virtualNetworkName, parameters, context)
.last()
.flatMap(this.client::getLroFinalResultOrError);
}
/**
* Creates or updates a virtual network in the specified resource group.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param parameters Virtual Network resource.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return virtual Network resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public VirtualNetworkInner createOrUpdate(
String resourceGroupName, String virtualNetworkName, VirtualNetworkInner parameters) {
return createOrUpdateAsync(resourceGroupName, virtualNetworkName, parameters).block();
}
/**
* Creates or updates a virtual network in the specified resource group.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param parameters Virtual Network resource.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return virtual Network resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public VirtualNetworkInner createOrUpdate(
String resourceGroupName, String virtualNetworkName, VirtualNetworkInner parameters, Context context) {
return createOrUpdateAsync(resourceGroupName, virtualNetworkName, parameters, context).block();
}
    /**
     * Updates a virtual network tags.
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualNetworkName The name of the virtual network.
     * @param tags Resource tags.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return virtual Network resource.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<VirtualNetworkInner>> updateTagsWithResponseAsync(
        String resourceGroupName, String virtualNetworkName, Map<String, String> tags) {
        // Validate required parameters up front; failures are surfaced through the returned
        // Mono (via Mono.error) rather than thrown synchronously.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (virtualNetworkName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter virtualNetworkName is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        // Service API version pinned by the code generator for this client.
        final String apiVersion = "2020-05-01";
        // Wrap the raw tag map in the request payload type expected by the service.
        TagsObject parameters = new TagsObject();
        parameters.withTags(tags);
        // Issue the request inside the subscriber's reactor context, after folding in the
        // client's shared context via subscriberContext.
        return FluxUtil
            .withContext(
                context ->
                    service
                        .updateTags(
                            this.client.getEndpoint(),
                            resourceGroupName,
                            virtualNetworkName,
                            apiVersion,
                            this.client.getSubscriptionId(),
                            parameters,
                            context))
            .subscriberContext(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext())));
    }
    /**
     * Updates a virtual network tags.
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualNetworkName The name of the virtual network.
     * @param tags Resource tags.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return virtual Network resource.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<VirtualNetworkInner>> updateTagsWithResponseAsync(
        String resourceGroupName, String virtualNetworkName, Map<String, String> tags, Context context) {
        // Validate required parameters up front; failures are surfaced through the returned
        // Mono (via Mono.error) rather than thrown synchronously.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (virtualNetworkName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter virtualNetworkName is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        // Service API version pinned by the code generator for this client.
        final String apiVersion = "2020-05-01";
        // Wrap the raw tag map in the request payload type expected by the service.
        TagsObject parameters = new TagsObject();
        parameters.withTags(tags);
        // Merge the caller-supplied context with the client's default context before the call.
        context = this.client.mergeContext(context);
        return service
            .updateTags(
                this.client.getEndpoint(),
                resourceGroupName,
                virtualNetworkName,
                apiVersion,
                this.client.getSubscriptionId(),
                parameters,
                context);
    }
/**
* Updates a virtual network tags.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param tags Resource tags.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return virtual Network resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<VirtualNetworkInner> updateTagsAsync(
String resourceGroupName, String virtualNetworkName, Map<String, String> tags) {
return updateTagsWithResponseAsync(resourceGroupName, virtualNetworkName, tags)
.flatMap(
(Response<VirtualNetworkInner> res) -> {
if (res.getValue() != null) {
return Mono.just(res.getValue());
} else {
return Mono.empty();
}
});
}
/**
* Updates a virtual network tags.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param tags Resource tags.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return virtual Network resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<VirtualNetworkInner> updateTagsAsync(
String resourceGroupName, String virtualNetworkName, Map<String, String> tags, Context context) {
return updateTagsWithResponseAsync(resourceGroupName, virtualNetworkName, tags, context)
.flatMap(
(Response<VirtualNetworkInner> res) -> {
if (res.getValue() != null) {
return Mono.just(res.getValue());
} else {
return Mono.empty();
}
});
}
/**
* Updates a virtual network tags.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param tags Resource tags.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return virtual Network resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public VirtualNetworkInner updateTags(
String resourceGroupName, String virtualNetworkName, Map<String, String> tags) {
return updateTagsAsync(resourceGroupName, virtualNetworkName, tags).block();
}
/**
* Updates a virtual network tags.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param tags Resource tags.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return virtual Network resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public VirtualNetworkInner updateTags(
String resourceGroupName, String virtualNetworkName, Map<String, String> tags, Context context) {
return updateTagsAsync(resourceGroupName, virtualNetworkName, tags, context).block();
}
/**
* Gets all virtual networks in a subscription.
*
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return all virtual networks in a subscription.
*/
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<PagedResponse<VirtualNetworkInner>> listSinglePageAsync() {
        // Guard required client configuration; violations are surfaced as error signals, not thrown,
        // so the reactive contract is preserved.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        // API version pinned by the code generator for this client.
        final String apiVersion = "2020-05-01";
        return FluxUtil
            .withContext(
                context ->
                    service.list(this.client.getEndpoint(), apiVersion, this.client.getSubscriptionId(), context))
            // Adapt the wire-format page (value list + nextLink) into azure-core's PagedResponse.
            .<PagedResponse<VirtualNetworkInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null))
            // Fold the client's azure-core Context into the Reactor subscriber context.
            .subscriberContext(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext())));
    }
/**
* Gets all virtual networks in a subscription.
*
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return all virtual networks in a subscription.
*/
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<PagedResponse<VirtualNetworkInner>> listSinglePageAsync(Context context) {
        // Guard required client configuration; violations are surfaced as error signals, not thrown.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        // API version pinned by the code generator for this client.
        final String apiVersion = "2020-05-01";
        // Merge the caller-supplied context with the client's default context before the call.
        context = this.client.mergeContext(context);
        return service
            .list(this.client.getEndpoint(), apiVersion, this.client.getSubscriptionId(), context)
            // Adapt the wire-format page (value list + nextLink) into azure-core's PagedResponse.
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null));
    }
/**
* Gets all virtual networks in a subscription.
*
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return all virtual networks in a subscription.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<VirtualNetworkInner> listAsync() {
return new PagedFlux<>(() -> listSinglePageAsync(), nextLink -> listAllNextSinglePageAsync(nextLink));
}
/**
* Gets all virtual networks in a subscription.
*
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return all virtual networks in a subscription.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<VirtualNetworkInner> listAsync(Context context) {
return new PagedFlux<>(
() -> listSinglePageAsync(context), nextLink -> listAllNextSinglePageAsync(nextLink, context));
}
/**
* Gets all virtual networks in a subscription.
*
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return all virtual networks in a subscription.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable<VirtualNetworkInner> list() {
return new PagedIterable<>(listAsync());
}
/**
* Gets all virtual networks in a subscription.
*
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return all virtual networks in a subscription.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable<VirtualNetworkInner> list(Context context) {
return new PagedIterable<>(listAsync(context));
}
/**
* Gets all virtual networks in a resource group.
*
* @param resourceGroupName The name of the resource group.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return all virtual networks in a resource group.
*/
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<PagedResponse<VirtualNetworkInner>> listByResourceGroupSinglePageAsync(String resourceGroupName) {
        // Guard required configuration and parameters; violations are surfaced as error signals, not thrown.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        // API version pinned by the code generator for this client.
        final String apiVersion = "2020-05-01";
        return FluxUtil
            .withContext(
                context ->
                    service
                        .listByResourceGroup(
                            this.client.getEndpoint(),
                            resourceGroupName,
                            apiVersion,
                            this.client.getSubscriptionId(),
                            context))
            // Adapt the wire-format page (value list + nextLink) into azure-core's PagedResponse.
            .<PagedResponse<VirtualNetworkInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null))
            // Fold the client's azure-core Context into the Reactor subscriber context.
            .subscriberContext(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext())));
    }
/**
* Gets all virtual networks in a resource group.
*
* @param resourceGroupName The name of the resource group.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return all virtual networks in a resource group.
*/
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<PagedResponse<VirtualNetworkInner>> listByResourceGroupSinglePageAsync(
        String resourceGroupName, Context context) {
        // Guard required configuration and parameters; violations are surfaced as error signals, not thrown.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        // API version pinned by the code generator for this client.
        final String apiVersion = "2020-05-01";
        // Merge the caller-supplied context with the client's default context before the call.
        context = this.client.mergeContext(context);
        return service
            .listByResourceGroup(
                this.client.getEndpoint(), resourceGroupName, apiVersion, this.client.getSubscriptionId(), context)
            // Adapt the wire-format page (value list + nextLink) into azure-core's PagedResponse.
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null));
    }
/**
* Gets all virtual networks in a resource group.
*
* @param resourceGroupName The name of the resource group.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return all virtual networks in a resource group.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<VirtualNetworkInner> listByResourceGroupAsync(String resourceGroupName) {
return new PagedFlux<>(
() -> listByResourceGroupSinglePageAsync(resourceGroupName), nextLink -> listNextSinglePageAsync(nextLink));
}
/**
* Gets all virtual networks in a resource group.
*
* @param resourceGroupName The name of the resource group.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return all virtual networks in a resource group.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<VirtualNetworkInner> listByResourceGroupAsync(String resourceGroupName, Context context) {
return new PagedFlux<>(
() -> listByResourceGroupSinglePageAsync(resourceGroupName, context),
nextLink -> listNextSinglePageAsync(nextLink, context));
}
/**
* Gets all virtual networks in a resource group.
*
* @param resourceGroupName The name of the resource group.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return all virtual networks in a resource group.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable<VirtualNetworkInner> listByResourceGroup(String resourceGroupName) {
return new PagedIterable<>(listByResourceGroupAsync(resourceGroupName));
}
/**
* Gets all virtual networks in a resource group.
*
* @param resourceGroupName The name of the resource group.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return all virtual networks in a resource group.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable<VirtualNetworkInner> listByResourceGroup(String resourceGroupName, Context context) {
return new PagedIterable<>(listByResourceGroupAsync(resourceGroupName, context));
}
/**
* Checks whether a private IP address is available for use.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param ipAddress The private IP address to be verified.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return response for CheckIPAddressAvailability API service call.
*/
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<IpAddressAvailabilityResultInner>> checkIpAddressAvailabilityWithResponseAsync(
        String resourceGroupName, String virtualNetworkName, String ipAddress) {
        // Guard required configuration and parameters; violations are surfaced as error signals, not thrown.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (virtualNetworkName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter virtualNetworkName is required and cannot be null."));
        }
        if (ipAddress == null) {
            return Mono.error(new IllegalArgumentException("Parameter ipAddress is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        // API version pinned by the code generator for this client.
        final String apiVersion = "2020-05-01";
        return FluxUtil
            .withContext(
                context ->
                    service
                        .checkIpAddressAvailability(
                            this.client.getEndpoint(),
                            resourceGroupName,
                            virtualNetworkName,
                            ipAddress,
                            apiVersion,
                            this.client.getSubscriptionId(),
                            context))
            // Fold the client's azure-core Context into the Reactor subscriber context.
            .subscriberContext(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext())));
    }
/**
* Checks whether a private IP address is available for use.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param ipAddress The private IP address to be verified.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return response for CheckIPAddressAvailability API service call.
*/
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<IpAddressAvailabilityResultInner>> checkIpAddressAvailabilityWithResponseAsync(
        String resourceGroupName, String virtualNetworkName, String ipAddress, Context context) {
        // Guard required configuration and parameters; violations are surfaced as error signals, not thrown.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (virtualNetworkName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter virtualNetworkName is required and cannot be null."));
        }
        if (ipAddress == null) {
            return Mono.error(new IllegalArgumentException("Parameter ipAddress is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        // API version pinned by the code generator for this client.
        final String apiVersion = "2020-05-01";
        // Merge the caller-supplied context with the client's default context before the call.
        context = this.client.mergeContext(context);
        return service
            .checkIpAddressAvailability(
                this.client.getEndpoint(),
                resourceGroupName,
                virtualNetworkName,
                ipAddress,
                apiVersion,
                this.client.getSubscriptionId(),
                context);
    }
/**
* Checks whether a private IP address is available for use.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param ipAddress The private IP address to be verified.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return response for CheckIPAddressAvailability API service call.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<IpAddressAvailabilityResultInner> checkIpAddressAvailabilityAsync(
String resourceGroupName, String virtualNetworkName, String ipAddress) {
return checkIpAddressAvailabilityWithResponseAsync(resourceGroupName, virtualNetworkName, ipAddress)
.flatMap(
(Response<IpAddressAvailabilityResultInner> res) -> {
if (res.getValue() != null) {
return Mono.just(res.getValue());
} else {
return Mono.empty();
}
});
}
/**
* Checks whether a private IP address is available for use.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param ipAddress The private IP address to be verified.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return response for CheckIPAddressAvailability API service call.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<IpAddressAvailabilityResultInner> checkIpAddressAvailabilityAsync(
String resourceGroupName, String virtualNetworkName, String ipAddress, Context context) {
return checkIpAddressAvailabilityWithResponseAsync(resourceGroupName, virtualNetworkName, ipAddress, context)
.flatMap(
(Response<IpAddressAvailabilityResultInner> res) -> {
if (res.getValue() != null) {
return Mono.just(res.getValue());
} else {
return Mono.empty();
}
});
}
/**
* Checks whether a private IP address is available for use.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param ipAddress The private IP address to be verified.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return response for CheckIPAddressAvailability API service call.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public IpAddressAvailabilityResultInner checkIpAddressAvailability(
String resourceGroupName, String virtualNetworkName, String ipAddress) {
return checkIpAddressAvailabilityAsync(resourceGroupName, virtualNetworkName, ipAddress).block();
}
/**
* Checks whether a private IP address is available for use.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param ipAddress The private IP address to be verified.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return response for CheckIPAddressAvailability API service call.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public IpAddressAvailabilityResultInner checkIpAddressAvailability(
String resourceGroupName, String virtualNetworkName, String ipAddress, Context context) {
return checkIpAddressAvailabilityAsync(resourceGroupName, virtualNetworkName, ipAddress, context).block();
}
/**
* Lists usage stats.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return response for the virtual networks GetUsage API service call.
*/
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<PagedResponse<VirtualNetworkUsageInner>> listUsageSinglePageAsync(
        String resourceGroupName, String virtualNetworkName) {
        // Guard required configuration and parameters; violations are surfaced as error signals, not thrown.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (virtualNetworkName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter virtualNetworkName is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        // API version pinned by the code generator for this client.
        final String apiVersion = "2020-05-01";
        return FluxUtil
            .withContext(
                context ->
                    service
                        .listUsage(
                            this.client.getEndpoint(),
                            resourceGroupName,
                            virtualNetworkName,
                            apiVersion,
                            this.client.getSubscriptionId(),
                            context))
            // Adapt the wire-format page (value list + nextLink) into azure-core's PagedResponse.
            .<PagedResponse<VirtualNetworkUsageInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null))
            // Fold the client's azure-core Context into the Reactor subscriber context.
            .subscriberContext(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext())));
    }
/**
* Lists usage stats.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return response for the virtual networks GetUsage API service call.
*/
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<PagedResponse<VirtualNetworkUsageInner>> listUsageSinglePageAsync(
        String resourceGroupName, String virtualNetworkName, Context context) {
        // Guard required configuration and parameters; violations are surfaced as error signals, not thrown.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (virtualNetworkName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter virtualNetworkName is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        // API version pinned by the code generator for this client.
        final String apiVersion = "2020-05-01";
        // Merge the caller-supplied context with the client's default context before the call.
        context = this.client.mergeContext(context);
        return service
            .listUsage(
                this.client.getEndpoint(),
                resourceGroupName,
                virtualNetworkName,
                apiVersion,
                this.client.getSubscriptionId(),
                context)
            // Adapt the wire-format page (value list + nextLink) into azure-core's PagedResponse.
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null));
    }
/**
* Lists usage stats.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return response for the virtual networks GetUsage API service call.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<VirtualNetworkUsageInner> listUsageAsync(String resourceGroupName, String virtualNetworkName) {
return new PagedFlux<>(
() -> listUsageSinglePageAsync(resourceGroupName, virtualNetworkName),
nextLink -> listUsageNextSinglePageAsync(nextLink));
}
/**
* Lists usage stats.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return response for the virtual networks GetUsage API service call.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<VirtualNetworkUsageInner> listUsageAsync(
String resourceGroupName, String virtualNetworkName, Context context) {
return new PagedFlux<>(
() -> listUsageSinglePageAsync(resourceGroupName, virtualNetworkName, context),
nextLink -> listUsageNextSinglePageAsync(nextLink, context));
}
/**
* Lists usage stats.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return response for the virtual networks GetUsage API service call.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable<VirtualNetworkUsageInner> listUsage(String resourceGroupName, String virtualNetworkName) {
return new PagedIterable<>(listUsageAsync(resourceGroupName, virtualNetworkName));
}
/**
* Lists usage stats.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return response for the virtual networks GetUsage API service call.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable<VirtualNetworkUsageInner> listUsage(
String resourceGroupName, String virtualNetworkName, Context context) {
return new PagedIterable<>(listUsageAsync(resourceGroupName, virtualNetworkName, context));
}
/**
* Get the next page of items.
*
* @param nextLink The nextLink parameter.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return response for the ListVirtualNetworks API service call.
*/
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<PagedResponse<VirtualNetworkInner>> listAllNextSinglePageAsync(String nextLink) {
        // The continuation link comes from the previous page; it is required here.
        if (nextLink == null) {
            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
        }
        return FluxUtil
            .withContext(context -> service.listAllNext(nextLink, context))
            // Adapt the wire-format page (value list + nextLink) into azure-core's PagedResponse.
            .<PagedResponse<VirtualNetworkInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null))
            // Fold the client's azure-core Context into the Reactor subscriber context.
            .subscriberContext(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext())));
    }
/**
* Get the next page of items.
*
* @param nextLink The nextLink parameter.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return response for the ListVirtualNetworks API service call.
*/
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<PagedResponse<VirtualNetworkInner>> listAllNextSinglePageAsync(String nextLink, Context context) {
        // The continuation link comes from the previous page; it is required here.
        if (nextLink == null) {
            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
        }
        // Merge the caller-supplied context with the client's default context before the call.
        context = this.client.mergeContext(context);
        return service
            .listAllNext(nextLink, context)
            // Adapt the wire-format page (value list + nextLink) into azure-core's PagedResponse.
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null));
    }
/**
* Get the next page of items.
*
* @param nextLink The nextLink parameter.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return response for the ListVirtualNetworks API service call.
*/
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<PagedResponse<VirtualNetworkInner>> listNextSinglePageAsync(String nextLink) {
        // The continuation link comes from the previous page; it is required here.
        if (nextLink == null) {
            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
        }
        return FluxUtil
            .withContext(context -> service.listNext(nextLink, context))
            // Adapt the wire-format page (value list + nextLink) into azure-core's PagedResponse.
            .<PagedResponse<VirtualNetworkInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null))
            // Fold the client's azure-core Context into the Reactor subscriber context.
            .subscriberContext(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext())));
    }
/**
* Get the next page of items.
*
* @param nextLink The nextLink parameter.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return response for the ListVirtualNetworks API service call.
*/
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<PagedResponse<VirtualNetworkInner>> listNextSinglePageAsync(String nextLink, Context context) {
        // The continuation link comes from the previous page; it is required here.
        if (nextLink == null) {
            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
        }
        // Merge the caller-supplied context with the client's default context before the call.
        context = this.client.mergeContext(context);
        return service
            .listNext(nextLink, context)
            // Adapt the wire-format page (value list + nextLink) into azure-core's PagedResponse.
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null));
    }
/**
* Get the next page of items.
*
* @param nextLink The nextLink parameter.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return response for the virtual networks GetUsage API service call.
*/
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<PagedResponse<VirtualNetworkUsageInner>> listUsageNextSinglePageAsync(String nextLink) {
        // The continuation link comes from the previous page; it is required here.
        if (nextLink == null) {
            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
        }
        return FluxUtil
            .withContext(context -> service.listUsageNext(nextLink, context))
            // Adapt the wire-format page (value list + nextLink) into azure-core's PagedResponse.
            .<PagedResponse<VirtualNetworkUsageInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null))
            // Fold the client's azure-core Context into the Reactor subscriber context.
            .subscriberContext(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext())));
    }
/**
* Get the next page of items.
*
* @param nextLink The nextLink parameter.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return response for the virtual networks GetUsage API service call.
*/
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<PagedResponse<VirtualNetworkUsageInner>> listUsageNextSinglePageAsync(
        String nextLink, Context context) {
        // The continuation link comes from the previous page; it is required here.
        if (nextLink == null) {
            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
        }
        // Merge the caller-supplied context with the client's default context before the call.
        context = this.client.mergeContext(context);
        return service
            .listUsageNext(nextLink, context)
            // Adapt the wire-format page (value list + nextLink) into azure-core's PagedResponse.
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null));
    }
}
| 48.334531
| 120
| 0.652297
|
3155e690075854937ec71be3ae94d7ec6201af00
| 4,390
|
package com.mantledillusion.injection.hura.core.injection;
import com.mantledillusion.injection.hura.core.AbstractInjectionTest;
import com.mantledillusion.injection.hura.core.exception.InjectionException;
import com.mantledillusion.injection.hura.core.exception.ProcessorException;
import com.mantledillusion.injection.hura.core.injection.injectables.InjectableWithAnnotatedConstructor;
import com.mantledillusion.injection.hura.core.injection.injectables.InjectableWithExplicitIndependent;
import com.mantledillusion.injection.hura.core.injection.injectables.InjectableWithInjectableConstructor;
import com.mantledillusion.injection.hura.core.injection.injectables.InjectableWithInjectableField;
import com.mantledillusion.injection.hura.core.injection.uninjectables.*;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
/**
 * Injection tests for the basic wiring mechanics: field injection,
 * constructor injection (implicit, annotated and invalid variants) and
 * optional independent injection.
 *
 * <p>Uses {@code assertNotNull}/{@code assertNull} instead of
 * {@code assertTrue(x != null)} so failures report the offending value.
 */
public class BasicInjectionTest extends AbstractInjectionTest {

    @Test
    public void testNullInjection() {
        // Injecting a null type is a caller error, not an injection failure.
        Assertions.assertThrows(IllegalArgumentException.class, () -> this.suite.injectInSuiteContext((Class<?>) null));
    }

    @Test
    public void testBasicFieldInjection() {
        InjectableWithInjectableField injectable = this.suite.injectInSuiteContext(InjectableWithInjectableField.class);
        Assertions.assertNotNull(injectable.wiredField);
    }

    @Test
    public void testExceptionThrowingConstructorInjection() {
        Assertions.assertThrows(InjectionException.class, () -> this.suite.injectInSuiteContext(UninjectableWithExceptionThrowingConstructor.class));
    }

    @Test
    public void testStaticFieldInjection() {
        Assertions.assertThrows(ProcessorException.class, () -> this.suite.injectInSuiteContext(UninjectableWithStaticWiredField.class));
    }

    @Test
    public void testFinalFieldInjection() {
        Assertions.assertThrows(ProcessorException.class, () -> this.suite.injectInSuiteContext(UninjectableWithFinalWiredField.class));
    }

    @Test
    public void testBasicConstructorInjection() {
        InjectableWithInjectableConstructor injectable = this.suite
                .injectInSuiteContext(InjectableWithInjectableConstructor.class);
        Assertions.assertNotNull(injectable.wiredThroughConstructor);
    }

    @Test
    public void testUninjectableConstructorInjection() {
        Assertions.assertThrows(InjectionException.class, () -> this.suite.injectInSuiteContext(UninjectableWithUninjectableConstructor.class));
    }

    @Test
    public void testMultipleInjectableConstructorInjection() {
        Assertions.assertThrows(InjectionException.class, () -> this.suite.injectInSuiteContext(UninjectableWith2InjectableConstructors.class));
    }

    @Test
    public void testUseAnnotatedConstructorInjection() {
        InjectableWithAnnotatedConstructor injectable = this.suite
                .injectInSuiteContext(InjectableWithAnnotatedConstructor.class);
        Assertions.assertNotNull(injectable.onlyWiredThroughUseAnnotatedConstructor);
    }

    @Test
    public void testMultipleUseAnnotatedConstructorInjection() {
        Assertions.assertThrows(InjectionException.class, () -> this.suite.injectInSuiteContext(UninjectableWith2UseAnnotatedConstructors.class));
    }

    @Test
    public void testIncompleteUseAnnotatedConstructorInjection() {
        Assertions.assertThrows(ProcessorException.class, () -> this.suite.injectInSuiteContext(UninjectableWithIncompleteUseAnnotatedConstructor.class));
    }

    @Test
    public void testMissingUseAnnotationOnNonPublicNoArgsConstructorInjection() {
        Assertions.assertThrows(InjectionException.class, () -> this.suite.injectInSuiteContext(UninjectableWithMissingUseAnnotationOnNonPublicNoArgsConstructor.class));
    }

    @Test
    public void testInterfaceInjection() {
        Assertions.assertThrows(InjectionException.class, () -> this.suite.injectInSuiteContext(UninjectableWithWiredInterfaceField.class));
    }

    @Test
    public void testSelfInjection() {
        Assertions.assertThrows(InjectionException.class, () -> this.suite.injectInSuiteContext(UninjectableWithWiredSelf.class));
    }

    @Test
    public void testOptionalIndependentInjection() {
        InjectableWithExplicitIndependent injectable = this.suite
                .injectInSuiteContext(InjectableWithExplicitIndependent.class);
        // The explicit independent is optional and nothing provides it, so it stays null.
        Assertions.assertNull(injectable.explicitInjectable);
    }

    @Test
    public void testOptionalIndependentInjectionWithMissingInjectAnnotation() {
        Assertions.assertThrows(ProcessorException.class, () -> this.suite.injectInSuiteContext(UninjectableWithOptionalInjectableAndMissingInjectAnnotation.class));
    }
}
| 41.028037
| 163
| 0.834396
|
8ebdaa2645bc7ed36a83e9797eeb32a4f1dc9868
| 4,733
|
package io.quarkus.hibernate.orm.runtime.service;
import java.util.ArrayList;
import java.util.List;
import org.hibernate.boot.cfgxml.internal.CfgXmlAccessServiceInitiator;
import org.hibernate.boot.registry.StandardServiceInitiator;
import org.hibernate.engine.config.internal.ConfigurationServiceInitiator;
import org.hibernate.engine.jdbc.batch.internal.BatchBuilderInitiator;
import org.hibernate.engine.jdbc.connections.internal.MultiTenantConnectionProviderInitiator;
import org.hibernate.engine.jdbc.cursor.internal.RefCursorSupportInitiator;
import org.hibernate.engine.jdbc.dialect.internal.DialectResolverInitiator;
import org.hibernate.engine.jdbc.env.internal.JdbcEnvironmentInitiator;
import org.hibernate.engine.jdbc.internal.JdbcServicesInitiator;
import org.hibernate.event.internal.EntityCopyObserverFactoryInitiator;
import org.hibernate.hql.internal.QueryTranslatorFactoryInitiator;
import org.hibernate.persister.internal.PersisterClassResolverInitiator;
import org.hibernate.persister.internal.PersisterFactoryInitiator;
import org.hibernate.property.access.internal.PropertyAccessStrategyResolverInitiator;
import org.hibernate.resource.beans.spi.ManagedBeanRegistryInitiator;
import org.hibernate.resource.transaction.internal.TransactionCoordinatorBuilderInitiator;
import org.hibernate.service.internal.SessionFactoryServiceRegistryFactoryInitiator;
import org.hibernate.tool.hbm2ddl.ImportSqlCommandExtractorInitiator;
import org.hibernate.tool.schema.internal.SchemaManagementToolInitiator;
import io.quarkus.hibernate.orm.runtime.customized.BootstrapOnlyProxyFactoryFactoryInitiator;
import io.quarkus.hibernate.orm.runtime.customized.QuarkusConnectionProviderInitiator;
import io.quarkus.hibernate.orm.runtime.customized.QuarkusJndiServiceInitiator;
import io.quarkus.hibernate.orm.runtime.customized.QuarkusJtaPlatformInitiator;
/**
 * Defines the list of standard service initiators used by Hibernate ORM when
 * running on Quarkus.
 *
 * WARNING: this is a customized list - it started as a copy of ORM's standard
 * list and has evolved independently. Hibernate Reactive uses a different list.
 */
public final class StandardHibernateORMInitiatorListProvider implements InitialInitiatorListProvider {

    @Override
    public List<StandardServiceInitiator> initialInitiatorList() {
        final ArrayList<StandardServiceInitiator> initiators = new ArrayList<StandardServiceInitiator>();

        // This one needs to be replaced after Metadata has been recorded:
        initiators.add(BootstrapOnlyProxyFactoryFactoryInitiator.INSTANCE);

        initiators.add(CfgXmlAccessServiceInitiator.INSTANCE);
        initiators.add(ConfigurationServiceInitiator.INSTANCE);
        initiators.add(PropertyAccessStrategyResolverInitiator.INSTANCE);
        initiators.add(ImportSqlCommandExtractorInitiator.INSTANCE);
        initiators.add(SchemaManagementToolInitiator.INSTANCE);
        initiators.add(JdbcEnvironmentInitiator.INSTANCE);

        // Quarkus-specific replacement:
        initiators.add(QuarkusJndiServiceInitiator.INSTANCE);
        // Quarkus-specific replacement:
        initiators.add(DisabledJMXInitiator.INSTANCE);

        initiators.add(PersisterClassResolverInitiator.INSTANCE);
        initiators.add(PersisterFactoryInitiator.INSTANCE);

        // Quarkus-specific replacement:
        initiators.add(QuarkusConnectionProviderInitiator.INSTANCE);
        initiators.add(MultiTenantConnectionProviderInitiator.INSTANCE);
        initiators.add(DialectResolverInitiator.INSTANCE);
        // Quarkus-specific replacement:
        initiators.add(DialectFactoryInitiator.INSTANCE);

        initiators.add(BatchBuilderInitiator.INSTANCE);
        initiators.add(JdbcServicesInitiator.INSTANCE);
        initiators.add(RefCursorSupportInitiator.INSTANCE);
        initiators.add(QueryTranslatorFactoryInitiator.INSTANCE);

        // Quarkus-specific replacement; this one carries state, so no shared singleton:
        initiators.add(new QuarkusMutableIdentifierGeneratorFactoryInitiator());

        initiators.add(QuarkusJtaPlatformInitiator.INSTANCE);
        initiators.add(SessionFactoryServiceRegistryFactoryInitiator.INSTANCE);
        initiators.add(QuarkusRegionFactoryInitiator.INSTANCE);
        initiators.add(TransactionCoordinatorBuilderInitiator.INSTANCE);
        initiators.add(ManagedBeanRegistryInitiator.INSTANCE);
        initiators.add(EntityCopyObserverFactoryInitiator.INSTANCE);

        initiators.trimToSize();
        return initiators;
    }
}
| 47.808081
| 143
| 0.817663
|
1d29e268ef87828a752f9659afd28c189eb615bf
| 602
|
package eu.grmdev.wakshop.core.model.api;
import com.github.fluent.hibernate.H;
import eu.grmdev.wakshop.core.model.Config;
/**
 * Small data-access facade for the application's single {@link Config} row.
 * Every operation is synchronized on this instance so concurrent callers
 * cannot interleave a read-modify-write of the configuration.
 */
public class ConfigApi {

    /**
     * Returns the stored configuration, creating and persisting the default
     * configuration on first access.
     *
     * @return the persisted {@link Config}, never {@code null}
     */
    public synchronized Config getConfig() {
        Object stored = H.request(Config.class).first();
        if (stored != null) {
            return (Config) stored;
        }
        // No row yet: persist the default so subsequent reads find it.
        return (Config) H.save(Config.getDefaultConfig());
    }

    /**
     * Persists the given configuration (insert or update).
     *
     * @param config the configuration to store
     */
    public synchronized void save(Config config) {
        H.saveOrUpdate(config);
    }

    /**
     * Updates the stored user name. Synchronized like the other operations so
     * the load-modify-store sequence cannot race with another writer, and
     * persistence goes through {@link #save(Config)} for consistency.
     *
     * @param name the new user name
     */
    public synchronized void setName(String name) {
        Config c = getConfig();
        c.setUsername(name);
        save(c);
    }
}
| 20.066667
| 50
| 0.692691
|
7fb9812df9b9d95817a4ede260c80ee9751b7071
| 2,512
|
/*
* IpForward
* Copyright (C) 2020 Jay113355
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.jay113355.ipforward.mixin;
import com.mojang.util.UUIDTypeAdapter;
import net.jay113355.ipforward.ProxyData;
import net.minecraft.network.NetworkManager;
import net.minecraft.network.handshake.client.C00Handshake;
import net.minecraftforge.fml.common.FMLCommonHandler;
import net.minecraftforge.fml.common.ObfuscationReflectionHelper;
import net.minecraftforge.fml.common.network.NetworkRegistry;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfoReturnable;
import java.net.InetSocketAddress;
/**
 * Created by Jay113355 on 4/3/2020.
 *
 * <p>Mixin into FML's common handler: when the client handshake carries
 * proxy-forwarded connection data, apply the real client address and profile
 * information to the {@link NetworkManager} before FML processes the
 * connection.
 */
@Mixin(FMLCommonHandler.class)
public abstract class MixinFMLCommonHandler {

    /**
     * Runs at the HEAD of {@code FMLCommonHandler.handleServerHandshake} and
     * unpacks proxy-forwarded data from the handshake address field.
     *
     * @param packet  the incoming handshake; its {@code ip} field may carry
     *                NUL-separated forwarded fields
     * @param manager the connection whose address/attributes are rewritten
     * @param ci      mixin callback info (return value is not altered here)
     */
    @Inject(method = "handleServerHandshake(Lnet/minecraft/network/handshake/client/C00Handshake;Lnet/minecraft/network/NetworkManager;)Z",
        at = @At(value = "HEAD"), remap = false)
    private void onHandleServerHandshake(C00Handshake packet, NetworkManager manager, CallbackInfoReturnable<Boolean> ci) {
        // Forwarding proxies (BungeeCord-style IP forwarding, presumably) pack
        // extra fields into the address string, separated by NUL characters;
        // exactly 4 elements means forwarded data is present.
        String[] elements = packet.ip.split("\0");
        if (elements.length == 4) {
            String ip = elements[1]; // The actual user IP address
            String uuid = elements[2]; // The connecting user UUID (no hyphens!)
            String gameProfileProp = elements[3]; // Game profile properties (texture usually)
            ProxyData data = new ProxyData(new InetSocketAddress(ip, 0), UUIDTypeAdapter.fromString(uuid), gameProfileProp);
            // Honor an FML marker found either in the forwarded data or in the packet itself.
            boolean hasFML = data.hasFMLMarker() || packet.hasFMLMarker();
            manager.channel().attr(NetworkRegistry.FML_MARKER).set(hasFML);
            manager.channel().attr(ProxyData.PROXY_KEY).set(data);
            // Replace the proxy's address with the real client address.
            manager.socketAddress = data.getRemoteAddress();
            if (hasFML != packet.hasFMLMarker()) {
                // Keep the packet's private marker field consistent with the combined result.
                ObfuscationReflectionHelper.setPrivateValue(C00Handshake.class, packet, hasFML, "hasFMLMarker");
            }
        }
    }
}
| 42.576271
| 136
| 0.771497
|
7544d132e840ac283fe05ce78d8d962a98d373a5
| 5,151
|
package hu.unideb.inf.auror.manager.DAOs;
/*-
* #%L
* Manager
* %%
* Copyright (C) 2016 - 2018 Faculty of Informatics, University of Debrecen
* %%
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
* #L%
*/
import hu.unideb.inf.auror.manager.models.UserModel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;
import javax.persistence.TypedQuery;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
/**
 * Data Access Object for the <code>UserModel</code> class.
 */
public class UserDAO {
    /**
     * SLF4J logger (correctly named after this class; it previously logged
     * under FinancialRecordDAO).
     */
    private final static Logger logger = LoggerFactory.getLogger(UserDAO.class);
    /**
     * Boolean that indicates whether an instance has been initialized yet.
     */
    private static boolean initialized = false;
    /**
     * Static instance of the DAO.
     */
    private static UserDAO userDAO;
    /**
     * Logged in user.
     */
    private static UserModel currentUser;
    /**
     * The <code>EntityManager</code> which provides the connection to the database.
     */
    private EntityManager entityManager;
    /**
     * The maximum id plus 1 of the stored UserModels.
     */
    private int nextRecordId = 0;

    /**
     * Basic constructor.
     * Calls <code>Initialize()</code>
     */
    private UserDAO() {
        Initialize();
    }

    /**
     * Returns or creates the only UserDAO object. If a previous
     * initialization attempt failed, the next call retries by creating a
     * fresh instance.
     *
     * @return the shared DAO instance
     */
    public static UserDAO getInstance() {
        if (!initialized)
            userDAO = new UserDAO();
        return userDAO;
    }

    /**
     * Initializes the DAO, creating an <code>EntityManager</code>, and seeds
     * the database with a default user when no users exist yet.
     */
    private void Initialize() {
        try {
            EntityManagerFactory entityManagerFactory = Persistence.createEntityManagerFactory("Firebird server");
            entityManager = entityManagerFactory.createEntityManager();
            initialized = true;
            if (GetUsers().isEmpty()) {
                UserModel defaultUser = new UserModel();
                defaultUser.setName("default");
                defaultUser.setPassword("");
                Save(defaultUser);
            }
        } catch (Exception e) {
            // Keep the stack trace; the bare message alone made failures hard to diagnose.
            logger.error(e.getMessage(), e);
            initialized = false;
        }
        logger.trace("UserDAO initialized.");
    }

    /**
     * Returns every <code>UserModel</code> from the database.
     *
     * @return Returns a List of <code>UserModel</code>s; empty when the DAO
     *         is not initialized.
     */
    public List<UserModel> GetUsers() {
        logger.trace("UserDAO.GetUsers()");
        if (!initialized)
            return new ArrayList<>();
        TypedQuery<UserModel> query = entityManager.createQuery("SELECT e FROM UserModel e", UserModel.class);
        return query.getResultList();
    }

    /**
     * @return Returns the maximum id from the USERS table plus 1.
     */
    private int getNextId() {
        logger.trace("UserDAO.getNextId()");
        Optional<Integer> maxId = GetUsers().stream().map(UserModel::getId).max(Integer::compareTo);
        maxId.ifPresent(integer -> nextRecordId = integer + 1);
        return nextRecordId;
    }

    /**
     * Saves a <code>UserModel</code> and makes it the current user.
     * Only new users (id == -1) are inserted; an existing user is not
     * re-persisted here, it merely becomes the current user.
     *
     * @param user The <code>UserModel</code> which will be saved.
     */
    public void Save(UserModel user) {
        if (user.getId() == -1) {
            user.setId(getNextId());
            entityManager.getTransaction().begin();
            entityManager.persist(user);
            entityManager.getTransaction().commit();
        }
        currentUser = user;
        logger.info("New user is created and logged in: {}", user.getName());
    }

    /**
     * @return Returns the logged in user.
     */
    UserModel GetCurrentUser() {
        return currentUser;
    }

    /**
     * Sets the logged in user.
     *
     * @param user The user that logs in.
     */
    public void SetCurrentUser(UserModel user) {
        currentUser = user;
        logger.info("User logged in: {}", user.getName());
    }
}
| 31.218182
| 114
| 0.639099
|
2a2ba592f0fd6e3447e5dca4ffb1d89f3bc3d505
| 542
|
package com.chin.service;
/**
 * @ClassName: ChinServiceImpl
 * @Description: Default implementation of {@link ChinService}; every
 *               operation reports itself on standard output.
 * @Author: LQH
 * @Date: 2020/07/24
 * @Version: 1.0
 **/
public class ChinServiceImpl implements ChinService {

    /** Writes a single progress line to standard output. */
    private void report(String message) {
        System.out.println(message);
    }

    @Override
    public void add() {
        report("增加了一个用户");
    }

    @Override
    public void delete() {
        report("删除了一个用户");
    }

    @Override
    public void update() {
        report("修改用户");
    }

    @Override
    public void select() {
        report("查询用户");
    }
}
| 16.9375
| 53
| 0.588561
|
f5accc34f70f78787e2692790cd3eb70c5a181d3
| 7,125
|
package com.xiahao.lib;
import com.hankz.util.dbService.OriginDbService;
import com.hankz.util.dbService.ResultDbService;
import com.hankz.util.dbutil.DCInformationFeatureModel;
import com.hankz.util.dbutil.DCInformationModel;
import java.util.*;
public class CalculateFeatures {

    /**
     * Loads every DC entry from the origin database, derives per-entry
     * features (word count, word-length sequence, TF-IDF values for the
     * words appearing in the DC), writes fixed-width numeric rows to
     * test_data.txt for model training, and builds
     * DCInformationFeatureModel rows (database persistence of those rows is
     * currently not invoked).
     */
    public static void main(String[] args) {
        List<DCInformationModel> dataBase = new ArrayList<>(OriginDbService.getInstance().getAllDCInformationData("DCInformation"));
        List<DCInformationStructure> originalData = new ArrayList<>();
        for (DCInformationModel line : dataBase) {
            DCInformationStructure temp = new DCInformationStructure(line.DC);
            temp.total_frequence = line.total_frequence;
            temp.different_APK_frequence = line.different_APK_frequence;
            temp.mainwords.addAll(Arrays.asList(line.mainwords.split(";")));
            // Keep only the DC segments that are recognized main words, in DC order.
            for (String segment : line.DC.split("\\.")) {
                if (temp.mainwords.contains(segment)) {
                    temp.wordsSequence.add(segment);
                }
            }
            temp.numberOfWords = feature_one_numberOfWords(line.mainwords);
            temp.wordsLenSequence.addAll(feature_three_lenOfWords(temp.wordsSequence));
            // APKs and URLs are carried along but not used for features here.
            temp.apks = line.APKs;
            temp.urls = line.URLs;
            originalData.add(temp);
        }
        List<DCInformationStructure> result = new ArrayList<>(feature_two_valueOfWords(originalData));
        List<numpydataStructure> numpydataList = new ArrayList<>();
        List<String> writeIntoFiles = new ArrayList<>();
        List<DCInformationFeatureModel> resultList = new ArrayList<>();
        for (DCInformationStructure line : result) {
            List<String> wordsValueSequence = new ArrayList<>();
            line.wordsValueSequence.forEach(w -> wordsValueSequence.add(Double.toString(w)));
            List<String> wordsLenSequence = new ArrayList<>();
            line.wordsLenSequence.forEach(w -> wordsLenSequence.add(Integer.toString(w)));
            DCInformationFeatureModel temp = new DCInformationFeatureModel(
                    line.DC,
                    String.join(";", line.mainwords),
                    line.numberOfWords,
                    String.join("->", line.wordsSequence),
                    "(" + String.join(",", wordsValueSequence) + ")",
                    "(" + String.join(",", wordsLenSequence) + ")",
                    line.total_frequence,
                    line.different_APK_frequence,
                    line.apks,
                    line.urls
            );
            // Truncate to the first three words so every numeric row has a fixed width.
            if (line.wordsLenSequence.size() > 3) {
                line.wordsLenSequence = line.wordsLenSequence.subList(0, 3);
                line.wordsValueSequence = line.wordsValueSequence.subList(0, 3);
            }
            if (line.wordsLenSequence.size() <= 3) {
                // Row layout: 3 TF-IDF values followed by 3 word lengths, zero-padded.
                numpydataStructure numpydata = new numpydataStructure();
                numpydata.list.addAll(line.wordsValueSequence);
                for (int i = 0; i < 3 - line.wordsValueSequence.size(); i++) {
                    numpydata.list.add(0.0);
                }
                line.wordsLenSequence.forEach(w -> numpydata.list.add((double) w));
                for (int i = 0; i < 3 - line.wordsLenSequence.size(); i++) {
                    numpydata.list.add(0.0);
                }
                // Space-separated textual form of the row.
                StringBuilder row = new StringBuilder();
                for (Double value : numpydata.list) {
                    if (row.length() > 0) {
                        row.append(' ');
                    }
                    row.append(value);
                }
                numpydataList.add(numpydata);
                writeIntoFiles.add(row.toString());
            }
            resultList.add(temp);
        }
        FileOperator.putLinesToFile("test_data.txt", String.join("\n", writeIntoFiles));
    }

    /**
     * Feature 1: number of non-empty ';'-separated main words.
     *
     * @param mainwords the raw ';'-joined main-word string from the database
     * @return count of non-empty segments
     */
    private static int feature_one_numberOfWords(String mainwords) {
        int count = 0;
        for (String word : mainwords.split(";")) {
            if (!word.isEmpty()) {
                count++;
            }
        }
        return count;
    }

    /**
     * Feature 2: appends to each entry's wordsValueSequence the TF-IDF value
     * of every word in its wordsSequence (parallel lists).
     * Pass 1 counts, per word, how many occurrences appear across all
     * entries' main-word lists; pass 2 computes TF (within the entry's word
     * sequence) times IDF (across entries).
     *
     * @param list entries whose wordsSequence is already populated
     * @return the same list, mutated in place
     */
    private static List<DCInformationStructure> feature_two_valueOfWords(List<DCInformationStructure> list) {
        Map<String, WordStructure> words = new HashMap<>();
        for (DCInformationStructure line : list) {
            for (String word : line.mainwords) {
                if (!word.isEmpty()) {
                    WordStructure info = words.computeIfAbsent(word, WordStructure::new);
                    info.entriesInDatabase++;
                }
            }
        }
        int numberOfEntry = list.size();
        for (DCInformationStructure line : list) {
            for (String word : line.wordsSequence) {
                WordStructure info = words.get(word);
                int wordsInEntry = Collections.frequency(line.wordsSequence, word);
                double TF = calculateTF(wordsInEntry, line.wordsSequence.size());
                double IDF = calculateIDF(numberOfEntry, info.entriesInDatabase);
                line.wordsValueSequence.add(TF * IDF);
            }
        }
        return list;
    }

    /** Term frequency: occurrences within the entry divided by the entry's word count. */
    public static double calculateTF(int x, int y) {
        return (double) x / (double) y;
    }

    /** Inverse document frequency: log10(total entries / occurrences across entries). */
    public static double calculateIDF(int x, int y) {
        return Math.log10((double) x / (double) y);
    }

    /**
     * Feature 3: the length of each word, in sequence order.
     *
     * @param wordsSequence the entry's ordered word list
     * @return one length per word, same order
     */
    private static List<Integer> feature_three_lenOfWords(List<String> wordsSequence) {
        List<Integer> lengths = new ArrayList<>(wordsSequence.size());
        for (String word : wordsSequence) {
            lengths.add(word.length());
        }
        return lengths;
    }
}
| 37.109375
| 132
| 0.555649
|
fdc1e9f63966d98b86c289a8a7ffe88e88e4684b
| 410
|
package com.supermap.gaf.storage.exceptions;
import javax.ws.rs.core.Response;
import javax.ws.rs.ext.ExceptionMapper;
/**
 * Maps a {@link StorageAuthorizationException} to an HTTP 403 (Forbidden)
 * response whose body is the exception message.
 */
public class StorageAuthorizationExceptionMapper implements ExceptionMapper<StorageAuthorizationException> {

    @Override
    public Response toResponse(StorageAuthorizationException exception) {
        String message = exception.getMessage();
        return Response
                .status(Response.Status.FORBIDDEN)
                .entity(message)
                .build();
    }
}
| 29.285714
| 108
| 0.792683
|
8bd1b6cf19405351aa78c7df2481d7652c16a1b8
| 2,055
|
/***************************************************************************
* Copyright 2015-2019 Kieker Project (http://kieker-monitoring.net)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
***************************************************************************/
package kieker.diagnosis.frontend.tab.methods.composite;
import org.testfx.api.FxRobot;
import kieker.diagnosis.frontend.test.Button;
import kieker.diagnosis.frontend.test.CheckBox;
import kieker.diagnosis.frontend.test.ComboBox;
import kieker.diagnosis.frontend.test.Link;
import kieker.diagnosis.frontend.test.TextField;
import lombok.Getter;
/**
 * Page object for the filter area of the methods tab, used by TestFX-based
 * UI tests. Each field wraps one control, located by its CSS id.
 * Read accessors for all fields are generated by Lombok's {@code @Getter}.
 */
@Getter
public final class MethodsFilterPage {

    // Free-text filters over the method-call attributes.
    private final TextField host;
    private final TextField clazz;
    private final TextField method;
    private final TextField exception;
    // How the text filters are interpreted.
    private final ComboBox searchType;
    private final CheckBox useRegularExpression;
    // Actions: run the search / store the current filter as a favorite.
    private final Button search;
    private final Link saveAsFavorite;

    /**
     * Looks up all filter controls via their CSS ids.
     *
     * @param fxRobot the TestFX robot used to locate the controls
     */
    public MethodsFilterPage( final FxRobot fxRobot ) {
        host = new TextField( fxRobot, "#tabMethodsFilterHost" );
        clazz = new TextField( fxRobot, "#tabMethodsFilterClass" );
        method = new TextField( fxRobot, "#tabMethodsFilterMethod" );
        exception = new TextField( fxRobot, "#tabMethodsFilterException" );
        searchType = new ComboBox( fxRobot, "#tabMethodsFilterSearchType" );
        useRegularExpression = new CheckBox( fxRobot, "#tabMethodsFilterUseRegExpr" );
        search = new Button( fxRobot, "#tabMethodsSearch" );
        saveAsFavorite = new Link( fxRobot, "#tabMethodsFilteSaveAsFavorite" );
    }
}
| 40.294118
| 80
| 0.717762
|
48a702b2c95d07a1b3920586c95693a51bb5338e
| 2,218
|
// $Id: MergeSort.java 97 2005-02-28 21:18:32Z blindsey $
package com.blnz.xsl.util;
public class MergeSort
{
private MergeSort() { }
public static void sort(Comparator cmp, Object[] src)
{
sort(cmp, src, 0, src.length);
}
public static void sort(Comparator cmp, Object[] src, int off, int len)
{
sort(cmp, src, off, len, new Object[len], 0);
}
public static void sort(Comparator cmp,
Object[] src, int off, int len,
Object[] temp, int tempOff) {
if (len <= 1) {
return;
}
int halfLen = len/2;
sortCopy(cmp, src, off, halfLen, temp, tempOff);
sortCopy(cmp, src, off + halfLen, len - halfLen, temp, tempOff + halfLen);
merge(cmp, temp, tempOff, halfLen, len - halfLen, src, off);
}
private static void sortCopy(Comparator cmp,
Object[] src, int off, int len,
Object[] dest, int destOff)
{
if (len <= 1) {
if (len != 0)
dest[destOff] = src[off];
return;
}
int halfLen = len/2;
sort(cmp, src, off, halfLen, dest, destOff);
sort(cmp, src, off + halfLen, len - halfLen, dest, destOff + halfLen);
merge(cmp, src, off, halfLen, len - halfLen, dest, destOff);
}
private static void merge(Comparator cmp,
Object[] src, int off1, int len1, int len2,
Object[] dest, int destOff)
{
int off2 = off1 + len1;
if (len1 != 0 && len2 != 0) {
for (;;) {
if (cmp.compare(src[off1], src[off2]) <= 0) {
dest[destOff++] = src[off1++];
if (--len1 == 0)
break;
}
else {
dest[destOff++] = src[off2++];
if (--len2 == 0)
break;
}
}
}
for (; len1 > 0; --len1)
dest[destOff++] = src[off1++];
for (; len2 > 0; --len2)
dest[destOff++] = src[off2++];
}
}
| 30.383562
| 82
| 0.444545
|
2ff6f1208151b445e0387850c20d0f5b2222f672
| 1,159
|
package util;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
/**
 * Utility methods for converting between dates and their string form.
 * Every conversion uses the fixed pattern {@code yyyy/MM/dd}.
 */
public class DateUtil {

    /** Date pattern shared by all conversions in this class. */
    private static final String PATTERN = "yyyy/MM/dd";

    /**
     * Formats the current date.
     *
     * @return today's date in {@code yyyy/MM/dd} form.
     */
    public static String nowToString() {
        return toString(new Date());
    }

    /**
     * Parses a {@code yyyy/MM/dd} string into a {@link Date}.
     *
     * @param s the date string; may be {@code null}.
     * @return the parsed date, or {@code null} when the input is
     *         {@code null} or unparseable.
     */
    public static Date parseString(String s) {
        if (s == null) {
            return null;
        }
        try {
            return newFormat().parse(s);
        } catch (ParseException e) {
            return null;
        }
    }

    /**
     * Formats a {@link Date}.
     *
     * @param date the date; may be {@code null}.
     * @return the formatted string, or the empty string for {@code null}.
     */
    public static String toString(Date date) {
        if (date == null) {
            return "";
        }
        return newFormat().format(date);
    }

    /**
     * Formats an epoch-millisecond timestamp.
     *
     * @param time milliseconds since the epoch.
     * @return the formatted string.
     */
    public static String toString(long time) {
        return newFormat().format(time);
    }

    /**
     * A fresh formatter per call: {@link SimpleDateFormat} is not
     * thread-safe, so instances are never shared.
     */
    private static SimpleDateFormat newFormat() {
        return new SimpleDateFormat(PATTERN);
    }

    /** Not instantiable: all operations are static. */
    private DateUtil() {
    }
}
| 17.044118
| 62
| 0.594478
|
e1a297a2b863f55dbc2eb7fc9cae25654c81c977
| 3,908
|
package cn.orange.core.net;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.text.TextUtils;
import com.google.gson.Gson;
import org.json.JSONException;
import org.json.JSONObject;
import java.lang.reflect.Type;
/**
 * Created by Orange on 2019/4/1.
 * Email:addskya@163.com
 *
 * Typed wrapper for the server's response envelope
 * ({@code {"code": ..., "desc": ..., "data": ...}}), plus static helpers for
 * pulling the individual fields out of a raw response JSON string.
 */
public class Response<T> {

    private static final int INVALID_CODE = -1;

    private int code;
    private String desc;
    private T data;

    Response(int code, String desc) {
        this(code, desc, null);
    }

    Response(int code, String desc, T value) {
        this.code = code;
        this.desc = desc;
        this.data = value;
    }

    /** Builds a successful (code 200) response carrying the given value. */
    public static <T> Response<T> success(T value) {
        return new Response<>(200, null, value);
    }

    /**
     * Reads the response code from a parsed response.
     *
     * @param response the response from the Web server; may be {@code null}
     * @return the response code, or {@code INVALID_CODE} for {@code null}
     */
    public static int getResponseCode(@Nullable Response response) {
        if (response == null) {
            return INVALID_CODE;
        }
        return response.getCode();
    }

    /**
     * Extracts the {@code code} field from a raw response JSON string.
     *
     * @param responseJson the JSON text returned by the server
     * @return the code, or {@code INVALID_CODE} when missing/unparseable
     */
    static int getResponseCode(@NonNull String responseJson) {
        if (TextUtils.isEmpty(responseJson)) {
            return INVALID_CODE;
        }
        JSONObject json = toJsonObject(responseJson);
        if (json == null) {
            return INVALID_CODE;
        }
        return json.optInt("code", INVALID_CODE);
    }

    /**
     * Extracts the {@code desc} field from a raw response JSON string.
     *
     * @param responseJson the JSON text returned by the server
     * @return the message, or {@code null} when missing/unparseable
     */
    static String getResponseMsg(@NonNull String responseJson) {
        if (TextUtils.isEmpty(responseJson)) {
            return null;
        }
        JSONObject json = toJsonObject(responseJson);
        if (json == null) {
            return null;
        }
        return json.optString("desc", null);
    }

    /**
     * Extracts and deserializes the {@code data} field from a raw response
     * JSON string.
     *
     * @param gson         the deserializer
     * @param responseJson the JSON text returned by the server
     * @param responseType the target type of the payload
     * @param <T>          the target type of the payload
     * @return the deserialized payload, or {@code null} when absent
     */
    static <T> T getResponseValue(@NonNull Gson gson,
                                  @NonNull String responseJson,
                                  @NonNull Type responseType) {
        if (TextUtils.isEmpty(responseJson)) {
            return null;
        }
        JSONObject json = toJsonObject(responseJson);
        String value = (json == null) ? null : json.optString("data", null);
        // The envelope may carry a literal "null" payload - treat it as absent.
        if (TextUtils.isEmpty(value) || "null".equalsIgnoreCase(value)) {
            return null;
        }
        return gson.fromJson(value, responseType);
    }

    /**
     * Parses a string into a {@link JSONObject}.
     *
     * @param jsonString the JSON text
     * @return the parsed object, or {@code null} when empty or malformed
     */
    private static JSONObject toJsonObject(@NonNull String jsonString) {
        if (TextUtils.isEmpty(jsonString)) {
            return null;
        }
        try {
            return new JSONObject(jsonString);
        } catch (JSONException e) {
            // Malformed payload: report it and fall through to null.
            e.printStackTrace();
            return null;
        }
    }

    /**
     * Whether or NOT the response is SUCCESS.
     *
     * @return true if the response code is 200
     */
    public boolean isSuccess() {
        return code == 200;
    }

    /** Raw code accessor; external callers use {@link #getResponseCode(Response)}. */
    private int getCode() {
        return code;
    }

    public String getDesc() {
        return desc;
    }

    public T getData() {
        return data;
    }

    @Override
    @NonNull
    public String toString() {
        StringBuilder sb = new StringBuilder("Response{");
        sb.append("code=").append(code);
        sb.append(", desc='").append(desc).append('\'');
        sb.append(", data=").append(data);
        sb.append('}');
        return sb.toString();
    }
}
| 24.123457
| 74
| 0.563971
|
5f3cb97f8fe3cb5068100cdf3b5f00ede54f3bf2
| 5,019
|
/*
* oxTrust is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
*
* Copyright (c) 2014, Gluu
*/
package org.gluu.oxtrust.action.uma;
import java.io.Serializable;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import javax.faces.context.ExternalContext;
import javax.faces.context.FacesContext;
import javax.servlet.http.HttpServletResponse;
import org.codehaus.jettison.json.JSONObject;
import org.gluu.oxtrust.ldap.service.ImageService;
import org.gluu.oxtrust.ldap.service.ViewHandlerService;
import org.gluu.oxtrust.ldap.service.uma.ScopeDescriptionService;
import org.gluu.site.ldap.persistence.exception.LdapMappingException;
import org.jboss.seam.ScopeType;
import org.jboss.seam.annotations.In;
import org.jboss.seam.annotations.Logger;
import org.jboss.seam.annotations.Name;
import org.jboss.seam.annotations.Scope;
import org.jboss.seam.log.Log;
import org.xdi.model.GluuImage;
import org.xdi.oxauth.model.uma.persistence.ScopeDescription;
import org.xdi.util.io.FileDownloader;
import org.xdi.util.io.FileDownloader.ContentDisposition;
import org.xdi.util.io.ResponseHelper;
/**
* Action class for download scope descriptions
*
* @author Yuriy Movchan Date: 12/06/2012
*/
@Name("scopeDescriptionDownloadAction")
@Scope(ScopeType.EVENT)
public class ScopeDescriptionDownloadAction implements Serializable {
private static final long serialVersionUID = 6486111971437252913L;
@Logger
private Log log;
// Looks up UMA scope descriptions by id (Seam-injected).
@In
protected ScopeDescriptionService scopeDescriptionService;
// Decodes the favicon XML and produces thumbnail bytes (Seam-injected).
@In
protected ImageService imageService;
@In(value = "#{facesContext.externalContext}")
private ExternalContext externalContext;
@In(value = "#{facesContext}")
private FacesContext facesContext;
@In
private ViewHandlerService viewHandlerService;
// Request parameters: the scope id to serve, and whether to force download
// (Content-Disposition: attachment) instead of inline display.
private String scopeId;
private boolean download;
// Streams a JSON document {"name": <scopeId>, "icon_uri": <bookmarkable icon URL>}
// for the scope identified by scopeId. On any failure an error response is sent
// instead of a file.
public void downloadFile() {
byte resultFile[] = null;
ScopeDescription scopeDescription = getScopeDescription();
if (scopeDescription != null) {
JSONObject jsonObject = new JSONObject();
try {
HashMap<String, List<String>> pageParams = new HashMap<String, List<String>>();
pageParams.put("scope", Arrays.asList(scopeDescription.getId()));
// Bookmarkable URL pointing back at the scope's icon page.
String umaScope = viewHandlerService.getBookmarkableURL("/uma/scope/scopeDescriptionFile.xhtml", pageParams);
jsonObject.put("name", scopeDescription.getId());
jsonObject.put("icon_uri", umaScope);
resultFile = jsonObject.toString().getBytes("UTF-8");
} catch (Exception ex) {
log.error("Failed to generate json response", ex);
}
}
if (resultFile == null) {
// Either the scope lookup or the JSON generation failed.
HttpServletResponse response = (HttpServletResponse) externalContext.getResponse();
FileDownloader.sendError(response, "Failed to generate json file");
} else {
// resultFile != null implies scopeDescription != null, so getId() is safe here.
ContentDisposition contentDisposition = download ? ContentDisposition.ATTACHEMENT : ContentDisposition.NONE;
ResponseHelper.downloadFile(scopeDescription.getId() + ".json", "application/json;charset=UTF-8", resultFile,
contentDisposition, facesContext);
}
}
// Streams the scope's favicon as a JPEG thumbnail, or an error response if the
// scope cannot be found or the image cannot be rendered.
public void downloadIcon() {
byte resultFile[] = null;
ScopeDescription scopeDescription = getScopeDescription();
if (scopeDescription != null) {
GluuImage gluuImage = imageService.getGluuImageFromXML(scopeDescription.getFaviconImageAsXml());
try {
resultFile = imageService.getThumImageData(gluuImage);
} catch (Exception ex) {
log.error("Failed to generate image response", ex);
}
}
if (resultFile == null) {
HttpServletResponse response = (HttpServletResponse) externalContext.getResponse();
FileDownloader.sendError(response, "Failed to prepare icon");
} else {
ContentDisposition contentDisposition = download ? ContentDisposition.ATTACHEMENT : ContentDisposition.NONE;
ResponseHelper.downloadFile(scopeDescription.getId() + ".jpg", "image/jpeg", resultFile, contentDisposition, facesContext);
}
}
// Resolves this.scopeId to exactly one ScopeDescription; returns null (after
// logging) when the branch cannot be prepared, the id is ambiguous/missing,
// or the LDAP lookup fails.
private ScopeDescription getScopeDescription() {
try {
scopeDescriptionService.prepareScopeDescriptionBranch();
} catch (Exception ex) {
log.error("Failed to initialize download action", ex);
return null;
}
log.debug("Loading UMA scope description '{0}'", this.scopeId);
ScopeDescription scopeDescription;
try {
List<ScopeDescription> scopeDescriptions = scopeDescriptionService.findScopeDescriptionsById(this.scopeId);
if (scopeDescriptions.size() != 1) {
log.error("Failed to find scope description '{0}'. Found: '{1}'", this.scopeId, scopeDescriptions.size());
return null;
}
scopeDescription = scopeDescriptions.get(0);
} catch (LdapMappingException ex) {
log.error("Failed to find scope description '{0}'", ex, this.scopeId);
return null;
}
return scopeDescription;
}
public String getScopeId() {
return scopeId;
}
public void setScopeId(String scopeId) {
this.scopeId = scopeId;
}
public boolean isDownload() {
return download;
}
public void setDownload(boolean download) {
this.download = download;
}
}
| 30.603659
| 126
| 0.757521
|
1e7a7322da646e570618d50aebdca2a807090cb6
| 655
|
package util;
import java.security.MessageDigest;
public class SHA256 {

    /**
     * Fixed salt prepended to every input before hashing. Must stay byte-identical
     * to the original literal or previously produced hashes will no longer match.
     */
    private static final byte[] SALT =
            "Hello! This is Salt.".getBytes(java.nio.charset.StandardCharsets.UTF_8);

    /**
     * Computes the salted SHA-256 digest of {@code input} and returns it as a
     * 64-character lowercase hex string.
     *
     * @param input text to hash; encoded as UTF-8
     * @return hex-encoded digest, or an empty string if hashing fails
     *         (preserves the original swallow-and-log behaviour)
     */
    public static String getSHA256(String input) {
        StringBuilder result = new StringBuilder(64);
        try {
            MessageDigest digest = MessageDigest.getInstance("SHA-256");
            // A freshly obtained digest needs no reset(); just feed salt then input.
            digest.update(SALT);
            byte[] hash = digest.digest(input.getBytes(java.nio.charset.StandardCharsets.UTF_8));
            for (byte b : hash) {
                // %02x zero-pads single-digit bytes, replacing the manual length check.
                result.append(String.format("%02x", b));
            }
        } catch (Exception e) {
            // Kept from the original: failures yield an empty result rather than throwing.
            e.printStackTrace();
        }
        return result.toString();
    }
}
| 25.192308
| 63
| 0.651908
|
38cccc0bd80acf94eaf10651ea5242a7c3a28b21
| 891
|
package com.imalive.api.Controller;
import com.imalive.api.Model.Base;
import com.imalive.api.Service.StatusService;
import lombok.RequiredArgsConstructor;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
@RestController
@RequestMapping("status")
public class StatusController {

    /** Service that knows which monitored targets are currently up or down. */
    private final StatusService statusService;

    /** Explicit constructor injection (equivalent to the Lombok-generated one). */
    public StatusController(StatusService statusService) {
        this.statusService = statusService;
    }

    /** GET /status/up — returns every target that is currently running. */
    @GetMapping(path = "/up")
    public ResponseEntity<List<Base>> listAllRunning() {
        return ResponseEntity.ok(statusService.listAllRunning());
    }

    /** GET /status/down — returns every target that is currently not running. */
    @GetMapping(path = "/down")
    public ResponseEntity<List<Base>> listAllNotRunning() {
        return ResponseEntity.ok(statusService.listAllNotRunning());
    }
}
| 30.724138
| 68
| 0.782267
|
ec69e4a7dcc683cf57fea9a9da4279e6fb93aca0
| 3,917
|
/*******************************************************************************
* Copyright (C) 2020 Snigdha Athaiya
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
******************************************************************************/
package tests.iisc.edu.pll.analysis.concurrent.dataflow;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Vector;
import iisc.edu.pll.analysis.Globals;
import iisc.edu.pll.data.Statement;
import iisc.edu.pll.data.lattice.TFunction;
import iisc.edu.pll.data.lattice.TFunctionFactory;
import iisc.edu.pll.data.lattice.lcp.LCPIDEFunction;
import iisc.edu.pll.data.lattice.lcp.LCPIDEFunctionFactory;
// Ad-hoc driver: builds three LCP IDE constant-assignment functions keyed by
// counter vectors, joins every function whose vector is component-wise <= v3,
// and prints the joined function.
public class JoinedFunctionMapTest {
public static void main(String args[])
{
// NOTE(review): 'size' is never used below — candidate for removal.
int size = 2;
// Three counter vectors: (0,0), (0,1), (1,1).
Vector<Integer> v1 = new Vector<>();
v1.add(0);
v1.add(0);
Vector<Integer> v2 = new Vector<>();
v2.add(0);
v2.add(1);
Vector<Integer> v3 = new Vector<>();
v3.add(1);
v3.add(1);
// Two data variables "a" and "b"; globals mirror what the analysis expects.
ArrayList<String> vars = new ArrayList<>();
vars.add("a");
vars.add("b");
Globals.DVars = new ArrayList<>();
Globals.DVars.add(Globals.lambda);
Globals.DVars.addAll(vars);
Globals.numberOfCounters = 2;
// Each argument list encodes a CONSTASSIGN: variable name plus "id,constant".
ArrayList<String> arguments = new ArrayList<>();
arguments.add("a");
arguments.add("1,4");
ArrayList<String> arguments2 = new ArrayList<>();
arguments2.add("a");
arguments2.add("1,5");
ArrayList<String> arguments3 = new ArrayList<>();
arguments3.add("a");
arguments3.add("1,3");
LCPIDEFunction f1 = LCPIDEFunctionFactory.createFunction(Statement.CONSTASSIGN, arguments);
LCPIDEFunction f2 = LCPIDEFunctionFactory.createFunction(Statement.CONSTASSIGN, arguments2);
LCPIDEFunction f3 = LCPIDEFunctionFactory.createFunction(Statement.CONSTASSIGN, arguments3);
HashMap<Vector<Integer>, TFunction> smap = new HashMap<>();
smap.put(v1, f1);
smap.put(v2, f2);
smap.put(v3, f3);
// Start from the identity function and join in every function at or below v3;
// since all three vectors are <= (1,1), all three participate.
TFunction joinedFunc = LCPIDEFunctionFactory.createFunction(Statement.ID, new ArrayList<>());
List<TFunction> ldfunc = getAllLower(smap, v3);
for(int i =0; i< ldfunc.size();i++)
joinedFunc = joinedFunc.join(ldfunc.get(i));
System.out.println(joinedFunc);
}
// Collects every function whose key vector is component-wise <= dVector.
// Locks the map (and each key vector) while scanning, mirroring the concurrent
// usage pattern of the real analysis.
private static List<TFunction> getAllLower(HashMap<Vector<Integer>, TFunction> sourceMap, Vector<Integer> dVector) {
List<TFunction> ldFuncs = new ArrayList<>();
synchronized (sourceMap) {
for (Vector<Integer> vec : sourceMap.keySet()) {
synchronized (vec) {
if (isLower(vec, dVector))
ldFuncs.add(sourceMap.get(vec));
}
}
}
return ldFuncs;
}
// no need to lock dvector, as the vector content does not change
// Returns true iff vec[i] <= dVector[i] for the first numberOfCounters components.
private static boolean isLower(Vector<Integer> vec, Vector<Integer> dVector) {
for (int i = 0; i < Globals.numberOfCounters; i++) {
if (vec.get(i) > dVector.get(i))
return false;
}
return true;
}
}
| 32.106557
| 117
| 0.686239
|
dde4af4a691601b3552d9728d7dab05d9c2a2161
| 3,092
|
package com.rawsanj.tweet.service;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.social.twitter.api.HashTagEntity;
import org.springframework.social.twitter.api.Stream;
import org.springframework.social.twitter.api.StreamDeleteEvent;
import org.springframework.social.twitter.api.StreamListener;
import org.springframework.social.twitter.api.StreamWarningEvent;
import org.springframework.social.twitter.api.Tweet;
import org.springframework.social.twitter.api.Twitter;
import org.springframework.stereotype.Service;
import org.springframework.web.servlet.mvc.method.annotation.SseEmitter;
@Service
public class StreamTweetEventService {
private final Logger log = LoggerFactory.getLogger(StreamTweetEventService.class);
@Autowired
private Twitter twitter;
// Handle to the open Twitter sample stream. Opened when the first SSE client
// connects and closed from onTweet() once no clients remain.
// NOTE(review): written and read from different threads without volatile or
// synchronization — confirm visibility is acceptable here.
private Stream userStream;
// Pushes each sampled tweet's location and hashtags to every connected SSE
// emitter. The stream is started only when the FIRST emitter arrives
// (emitters.size() == 1) and closed when the list becomes empty.
public void streamTweetEvent(List<SseEmitter> emitters) throws InterruptedException{
List<StreamListener> listeners = new ArrayList<StreamListener>();
StreamListener streamListener = new StreamListener() {
@Override
public void onWarning(StreamWarningEvent warningEvent) {
}
@Override
public void onTweet(Tweet tweet) {
//log.info("User '{}', Tweeted : {}, from ; {}", tweet.getUser().getName() , tweet.getText(), tweet.getUser().getLocation());
Integer connectedUsers = emitters.size();
log.info("Streaming to :" + connectedUsers +" connected Users");
if (connectedUsers!=0) {
// Fan the tweet out to every connected client.
// NOTE(review): 'emitters' is shared with the caller and iterated here on
// the stream thread — verify it cannot be mutated concurrently.
for (SseEmitter emiter : emitters) {
try {
emiter.send(SseEmitter.event().name("streamLocation").data(tweet.getUser().getLocation()));
// Concatenate all hashtags of this tweet into one "#a #b " string.
StringBuilder hashTag = new StringBuilder();
List<HashTagEntity> hashTags = tweet.getEntities().getHashTags();
for (HashTagEntity hash : hashTags) {
hashTag.append("#"+hash.getText() + " ");
}
//System.out.println(hashTag);
emiter.send(SseEmitter.event().name("streamHashtags").data(hashTag));
} catch (IOException e) {
// A send failure means the client went away; the emitter is left for the
// caller to prune.
System.out.println("User Disconnected from the Stream");
//e.printStackTrace();
}
}
}else{
//Close Stream when all Users are disconnected.
userStream.close();
}
}
@Override
public void onLimit(int numberOfLimitedTweets) {
}
@Override
public void onDelete(StreamDeleteEvent deleteEvent) {
}
};
//Start Stream when a User is connected
if (emitters.size()==1) {
listeners.add(streamListener);
userStream = twitter.streamingOperations().sample(listeners);
}
// Stream from a specific Location:
// Float west=-122.75f;
// Float south=36.8f;
// Float east=-121.75f;
// Float north = 37.8f;
//
// FilterStreamParameters filterStreamParameters = new FilterStreamParameters();
// filterStreamParameters.addLocation(west, south, east, north);
//Stream userStream = twitter.streamingOperations().filter(filterStreamParameters, listeners);
}
}
| 31.876289
| 129
| 0.718629
|
94e9dcc7fc8b1fb5dc67526fa209d84a82efc775
| 451
|
package com.baeldung.dddcontexts.ordercontext.service;
import com.baeldung.dddcontexts.ordercontext.model.CustomerOrder;
import com.baeldung.dddcontexts.ordercontext.repository.CustomerOrderRepository;
import com.baeldung.dddcontexts.sharedkernel.service.ApplicationService;
/**
 * Application service for the order bounded context.
 */
public interface OrderService extends ApplicationService {
/**
 * Places the given customer order.
 *
 * @param order the order to place
 */
void placeOrder(CustomerOrder order);
/**
 * Supplies the repository this service persists orders through.
 *
 * @param orderRepository repository used to store customer orders
 */
void setOrderRepository(CustomerOrderRepository orderRepository);
}
| 37.583333
| 80
| 0.86031
|
e82d21e9ebc2f00517721262ec1bc67a8b1976c7
| 3,340
|
package org.esfinge.aom.model.rolemapper.core;
import java.util.List;
import java.util.Map;
import java.util.WeakHashMap;
import org.esfinge.aom.api.model.IEntity;
import org.esfinge.aom.api.model.IProperty;
import org.esfinge.aom.api.model.IPropertyType;
import org.esfinge.aom.exceptions.EsfingeAOMException;
// Adapts a (key, value) entry of a map-backed datasource to the IProperty
// interface. Instances are cached per key in a static map so repeated
// getAdapter() calls for the same key return the same adapter.
public class AdapterPropertyMap implements IProperty {
// The property's current value object.
private Object dsObject;
// The map key this adapter represents; getName() casts it to String.
private Object dsObjectKey;
// NOTE(review): each AdapterPropertyMap value holds a strong reference to its
// own key (dsObjectKey), which prevents WeakHashMap entries from ever being
// collected — the weak-key semantics are effectively defeated. Verify intent.
private static Map<Object, AdapterPropertyMap> objectMap = new WeakHashMap<Object, AdapterPropertyMap>();
// Creates an adapter for the given key/value and registers it in the cache.
// If dsProperty is null, a fresh instance of propertyClass is created instead.
private AdapterPropertyMap (String propertyClass, Object dsPropertyKey, Object dsProperty) throws EsfingeAOMException
{
try
{
Class<?> clazz = Class.forName(propertyClass);
Object dsObj = dsProperty;
if (dsObj == null)
{
dsObj = clazz.newInstance();
}
dsObject = dsObj;
dsObjectKey = dsPropertyKey;
objectMap.put(dsPropertyKey, this);
}
catch (Exception e)
{
throw new EsfingeAOMException(e);
}
}
// Returns the cached adapter for dsPropertyKey (refreshing its value if it
// differs by identity), or creates and caches a new one. Returns null when
// either argument is null.
public static AdapterPropertyMap getAdapter(Object dsPropertyKey, Object dsProperty) throws EsfingeAOMException
{
try{
if (dsPropertyKey != null && dsProperty != null)
{
if (objectMap.containsKey(dsPropertyKey)){
AdapterPropertyMap apm = objectMap.get(dsPropertyKey);
// Reference (==) comparison: only re-set when it is a different object.
if(apm.getValue() != dsProperty){
apm.setValue(dsProperty);
}
return apm;
}
return new AdapterPropertyMap(dsProperty.getClass().getName(), dsPropertyKey, dsProperty);
}
return null;
}catch(Exception e){
throw new EsfingeAOMException(e);
}
}
// Map-backed properties carry no type metadata; always null.
@Override
public IPropertyType getPropertyType() throws EsfingeAOMException {
return null;
}
// Intentionally a no-op: the type cannot be stored for a plain map entry.
@Override
public void setPropertyType(IPropertyType propertyType)
throws EsfingeAOMException {
}
// Returns the value only while a key is set; a cleared key reads as null.
@Override
public Object getValue() throws EsfingeAOMException {
if(dsObjectKey != null){
return dsObject;
}
return null;
}
// Stores the value; IEntity values are unwrapped to their underlying object.
@Override
public void setValue(Object value) throws EsfingeAOMException {
try{
Object valueToSet = value;
if (value instanceof IEntity)
{
IEntity entity = (IEntity)value;
valueToSet = entity.getAssociatedObject();
}
dsObject = valueToSet;
} catch(Exception e){
throw new EsfingeAOMException(e);
}
}
@Override
public Object getAssociatedObject() {
return dsObject;
}
// The property name is the map key, assumed to be a String.
@Override
public String getName() throws EsfingeAOMException {
try{
if(dsObjectKey != null){
return (String) dsObjectKey;
}
return null;
} catch (Exception e) {
throw new EsfingeAOMException(e);
}
}
// NOTE(review): renames only this adapter's key field — the cache entry in
// objectMap still sits under the old key. Confirm this is intended.
@Override
public void setName(String value) throws EsfingeAOMException {
dsObjectKey = value;
}
// Drops the cache entry registered under the given name/key.
public void removePropertyMap(String name) throws EsfingeAOMException {
objectMap.remove(name);
}
// Nested properties are not supported for map-backed entries.
@Override
public List<IProperty> getProperties() throws EsfingeAOMException {
// TODO Auto-generated method stub
return null;
}
@Override
public void setProperty(String propertyName, Object propertyValue)
throws EsfingeAOMException {
// TODO Auto-generated method stub
}
@Override
public void removeProperty(String propertyName) throws EsfingeAOMException {
// TODO Auto-generated method stub
}
@Override
public IProperty getProperty(String propertyName)
throws EsfingeAOMException {
// TODO Auto-generated method stub
return null;
}
}
| 22.567568
| 118
| 0.724251
|
2462df26be48f3c37ead78b4a33edd8fded09c4d
| 451
|
package com.scalar.kelpie.modules.dummy;
import com.scalar.kelpie.config.Config;
import com.scalar.kelpie.modules.PostProcessor;
/**
 * No-op implementation of {@link PostProcessor}: a placeholder module that
 * performs no post-processing work at all.
 */
public class DummyPostProcessor extends PostProcessor {

    public DummyPostProcessor(Config config) {
        super(config);
    }

    /** Intentionally does nothing — there is no post-processing to perform. */
    @Override
    public void execute() {
    }

    /** Intentionally does nothing — no resources are held. */
    @Override
    public void close() {
    }
}
| 21.47619
| 90
| 0.736142
|
f72a61a34e6b6ff100f10dcddefe851bec255dfd
| 6,076
|
/*
* Copyright (c) 2008-2016, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.simulator.provisioner;
import com.hazelcast.simulator.common.AgentsFile;
import com.hazelcast.simulator.common.SimulatorProperties;
import com.hazelcast.simulator.utils.Bash;
import com.hazelcast.simulator.utils.jars.HazelcastJARs;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
import joptsimple.OptionSpec;
import org.jclouds.compute.ComputeService;
import static com.hazelcast.simulator.common.SimulatorProperties.PROPERTIES_FILE_NAME;
import static com.hazelcast.simulator.utils.CliUtils.initOptionsWithHelp;
import static com.hazelcast.simulator.utils.CliUtils.printHelpAndExit;
import static com.hazelcast.simulator.utils.CloudProviderUtils.isCloudProvider;
import static com.hazelcast.simulator.utils.SimulatorUtils.loadSimulatorProperties;
import static com.hazelcast.simulator.utils.jars.HazelcastJARs.isPrepareRequired;
import static java.util.Collections.singleton;
// Command-line front end for the Provisioner: declares the accepted options,
// builds a configured Provisioner from parsed arguments (init), and dispatches
// exactly one action per invocation (run).
final class ProvisionerCli {
private final OptionParser parser = new OptionParser();
private final OptionSpec<Integer> scaleSpec = parser.accepts("scale",
"Number of Simulator machines to scale to. If the number of machines already exists, the call is ignored. If the"
+ " desired number of machines is smaller than the actual number of machines, machines are terminated.")
.withRequiredArg().ofType(Integer.class);
private final OptionSpec installSpec = parser.accepts("install",
"Installs Simulator on all provisioned machines.");
private final OptionSpec uploadHazelcastSpec = parser.accepts("uploadHazelcast",
"If defined --install will upload the Hazelcast JARs as well.");
private final OptionSpec<Boolean> enterpriseEnabledSpec = parser.accepts("enterpriseEnabled",
"Use JARs of Hazelcast Enterprise Edition.")
.withRequiredArg().ofType(Boolean.class).defaultsTo(false);
private final OptionSpec listAgentsSpec = parser.accepts("list",
"Lists the provisioned machines (from " + AgentsFile.NAME + " file).");
private final OptionSpec<String> downloadSpec = parser.accepts("download",
"Download all files from the remote Worker directories. Use --clean to delete all Worker directories.")
.withOptionalArg().ofType(String.class).defaultsTo("workers");
private final OptionSpec cleanSpec = parser.accepts("clean",
"Cleans the remote Worker directories on the provisioned machines.");
private final OptionSpec killSpec = parser.accepts("kill",
"Kills the Java processes on all provisioned machines (via killall -9 java).");
private final OptionSpec terminateSpec = parser.accepts("terminate",
"Terminates all provisioned machines.");
private final OptionSpec<String> propertiesFileSpec = parser.accepts("propertiesFile",
"The file containing the Simulator properties. If no file is explicitly configured, first the local working directory"
+ " is checked for a file '" + PROPERTIES_FILE_NAME + "'. All missing properties are always loaded from"
+ " '$SIMULATOR_HOME/conf/" + PROPERTIES_FILE_NAME + "'.")
.withRequiredArg().ofType(String.class);
// Not instantiable from outside; use the static init/run entry points.
private ProvisionerCli() {
}
// Parses args and assembles a Provisioner: a jclouds ComputeService is only
// created for cloud providers, and Hazelcast JARs are only prepared when
// --uploadHazelcast is set (and either the version spec requires preparation
// or the enterprise edition is NOT requested).
static Provisioner init(String[] args) {
ProvisionerCli cli = new ProvisionerCli();
OptionSet options = initOptionsWithHelp(cli.parser, args);
SimulatorProperties properties = loadSimulatorProperties(options, cli.propertiesFileSpec);
ComputeService computeService = (isCloudProvider(properties) ? new ComputeServiceBuilder(properties).build() : null);
Bash bash = new Bash(properties);
HazelcastJARs hazelcastJARs = null;
boolean enterpriseEnabled = options.valueOf(cli.enterpriseEnabledSpec);
if (options.has(cli.uploadHazelcastSpec)) {
String hazelcastVersionSpec = properties.getHazelcastVersionSpec();
if (isPrepareRequired(hazelcastVersionSpec) || !enterpriseEnabled) {
hazelcastJARs = HazelcastJARs.newInstance(bash, properties, singleton(hazelcastVersionSpec));
}
}
return new Provisioner(properties, computeService, bash, hazelcastJARs, enterpriseEnabled);
}
// Re-parses args and dispatches the first matching action; prints help when
// no action option is present. The provisioner is always shut down afterwards.
static void run(String[] args, Provisioner provisioner) {
ProvisionerCli cli = new ProvisionerCli();
OptionSet options = initOptionsWithHelp(cli.parser, args);
try {
if (options.has(cli.scaleSpec)) {
int size = options.valueOf(cli.scaleSpec);
provisioner.scale(size);
} else if (options.has(cli.installSpec)) {
provisioner.installSimulator();
} else if (options.has(cli.listAgentsSpec)) {
provisioner.listMachines();
} else if (options.has(cli.downloadSpec)) {
String dir = options.valueOf(cli.downloadSpec);
provisioner.download(dir);
} else if (options.has(cli.cleanSpec)) {
provisioner.clean();
} else if (options.has(cli.killSpec)) {
provisioner.killJavaProcesses();
} else if (options.has(cli.terminateSpec)) {
provisioner.terminate();
} else {
printHelpAndExit(cli.parser);
}
} finally {
provisioner.shutdown();
}
}
}
| 47.46875
| 130
| 0.695688
|
90c7a6dbe990b03d6432046eb42adacd101c4254
| 5,699
|
import java.awt.*;
import java.awt.event.*;
import acm.graphics.*;
import acm.program.*;
import acm.util.*;
import java.util.*;
import java.io.*;
import javax.swing.*;
import javax.swing.event.*;
/**
* This is a sample program showing you how to use GUI components.
* Note that it adds components to the WEST (left) of the screen.
* You could choose to add things elsewhere (EAST, SOUTH, NORTH).
*/
public class TryGUIs extends GraphicsProgram implements ChangeListener {
//We declare the various components - JButton, JComboBox, JSlider, JLabel,
// JTextfield
// NOTE(review): JComboBox is used raw; with generics it would be JComboBox<String>.
private JButton colorButton;
private JComboBox shapeChooser;
private JSlider sizeSlider;
private JCheckBox filled;
private JLabel sliderLabel;
private JTextField sliderValue;
//We also have some other variables we need in our program
// Current shape size in pixels, driven by the slider (default 50).
private int size=50;
// Whether shapes are drawn filled, driven by the "Filled" checkbox.
private boolean fillItIn=true;
// Current drawing color, updated via the color button's JColorChooser dialog.
private Color color=Color.RED;
// Currently selected shape name: "Square", "Circle" or "Turtle".
private String currentShape="Square";
/**
* We arrange init() so it adds mouse and action listeners, and GUI components.
* Note each GUI component needs to be created, add appropriate listener, and,
* then, get added to the screen.
*/
public void init() {
addMouseListeners();
addActionListeners();
//add the color JButton. JButtons react to being pressed.
colorButton=new JButton("Change Color");
colorButton.addActionListener(this);
add(colorButton,WEST);
//add the shapeChooser JComboBox (a menu of sorts)
shapeChooser=new JComboBox();
shapeChooser.addItem("Square");
shapeChooser.addItem("Circle");
shapeChooser.addItem("Turtle");
shapeChooser.addActionListener(this);
add(shapeChooser,WEST);
//add the filled JCheckBox. JCheckBoxes have either true or false values.
filled=new JCheckBox("Filled");
filled.setSelected(true);
filled.addActionListener(this);
add(filled,WEST);
/* sizeSlider is a JSlider. They are normally used to allow users to pick a range
* of numerical values. Ours runs from 40 to 200 with default 50. We also specify
* that the slider will be oriented in the VERTICAL direction. (HORIZONTAL is the
* default orientation. */
sizeSlider=new JSlider(JSlider.VERTICAL,40,200,50);
//we can (optionally) add tick marks to the JSlider.
sizeSlider.setMajorTickSpacing(40);
sizeSlider.setPaintTicks(true);
//This slightly odd construct adds labels to the JSlider if we want them.
Hashtable<Integer,JLabel> labelTable = new Hashtable<Integer,JLabel>();
labelTable.put( new Integer( 40 ), new JLabel("40") );
labelTable.put( new Integer( 80 ), new JLabel("80") );
labelTable.put( new Integer( 120 ), new JLabel("120") );
labelTable.put(new Integer(160), new JLabel("160"));
labelTable.put(new Integer(200), new JLabel("200"));
sizeSlider.setLabelTable( labelTable );
sizeSlider.setPaintLabels(true);
sizeSlider.addChangeListener(this);
//We also add a JTextField so we can see the slider value we have chosen.
sliderValue=new JTextField(5);
sliderValue.setText("50");
//We also add a label "above" the slider.
sliderLabel=new JLabel("Size");
//add the label, slider, and textfield in that order.
add(sliderLabel,WEST);
add(sizeSlider,WEST);
add(sliderValue,WEST);
}
/**
* our run() does nothing except set the size.
* */
public void run() {
setSize(1000,600);
}
/**
* actionPerformed reacts to any of the standard components to determine what
* action to take. The ActionEvent e contains the "source" of the action so we
* use that to determine which action to do.
*/
public void actionPerformed(ActionEvent e) {
Object o=e.getSource();
/* If the object was the color button, we use the JColorChooser to pick the
* appropriate color.
*/
if(o.equals(colorButton)) {
color = JColorChooser.showDialog(
null,
"Choose Shape Color",
color);
}
//the JCheckBox returns true or false depending on whether it is checked
// or not.
else if(o.equals(filled)) {
fillItIn=filled.isSelected();
}
//The JComboBox shapeChooser can return the currently selected "label".
else if(o.equals(shapeChooser)) {
currentShape=(String) shapeChooser.getSelectedItem();
}
}
/**
* mouseClicked draws either a square or a circle (depending on the result
* of the shapeChooser) either filled or not filled (depending on the filled
* JCheckBox) in the color chosen by the colorButton at the location of the
* click of the mouse. Any other selection falls through to the MyTurtle shape.
*/
public void mouseClicked(MouseEvent e) {
if(currentShape.equals("Square")) {
GRect gr=new GRect(e.getX(),e.getY(),size,size);
gr.setColor(color);
if(fillItIn) {
gr.setFilled(true);
gr.setFillColor(color);
}
add(gr);
}
else if(currentShape.equals("Circle")) {
GOval go=new GOval(e.getX(),e.getY(),size,size);
go.setColor(color);
if(fillItIn) {
go.setFilled(true);
go.setFillColor(color);
}
add(go);
}
else {
// "Turtle" (or anything else): MyTurtle is a project-local shape class.
// NOTE(review): MyTurtle ignores the fillItIn flag — confirm that is intended.
MyTurtle turtle=new MyTurtle(e.getX(),e.getY(),size);
turtle.setColor(color);
add(turtle);
}
}
/**
* For JSlider objects we use the stateChanged method instead of
* the actionPerformed method. Updates both the size field and its
* textual display.
*/
public void stateChanged(ChangeEvent e) {
JSlider source = (JSlider)e.getSource();
size = (int)source.getValue();
sliderValue.setText(""+size);
}
/**
* As usual, we use main to ease launching of the program.
*/
public static void main(String [] args) {
new TryGUIs().start();
}
}
| 33.523529
| 85
| 0.669942
|
22fbe40e850ca4f08f7ec8050194b15d4f0165ee
| 307
|
package io.github.pleuvoir.message.channel.model.dto;
import java.io.Serializable;
import lombok.Data;
/**
 * SMS channel information: serializable DTO carrying a channel code.
 * @author pleuvoir
 *
 */
@Data
public class MsgChannelDTO implements Serializable {
private static final long serialVersionUID = 1307165110480329286L;
// Unique code identifying the SMS channel.
private String channelCode;
}
| 16.157895
| 67
| 0.771987
|
66a09efedb240518cf3067ab03c1fb86f5b8c4ec
| 505
|
/**
*
*/
package com.samton.erp.api.shop.constant;
/**
 * Common shop/platform constants used by order handling.
 *
 * @author A18ccms a18ccms_gmail_com
 * @date 2016-04-14 10:20:30
 */
public final class ShopConstant {

	/** OAuth endpoint used to exchange an authorization code for a Wish access token. */
	public static final String WISH_ACCESS_TOKEN_URL = "https://merchant.wish.com/api/v2/oauth/access_token";

	/** Platform type: Aliexpress. */
	public static final Short ALIEXPRESS = 1;

	/** Platform type: Wish. */
	public static final Short WISH = 2;

	/** Platform type: other. */
	public static final Short OTHER = 0;

	/** Constants holder — not instantiable. */
	private ShopConstant() {
	}
}
| 18.703704
| 106
| 0.70099
|
5d61af0e2c8e3155e9e6ae5e082505dce3e5a76f
| 508
|
package interfaz;
// UI skeleton for composing a message ("Crear mensaje" = create message).
// NOTE(review): this looks auto-generated from a UML model — 'event' is not a
// Java type and 'TexField' is presumably a typo for TextField; confirm these
// project-local types exist before use.
public class Crear_mensaje {
// Event fired when the message is sent.
private event _enviar_mensaje;
// Form widgets: title label, subject label/field, body label/view, buttons.
private Label _nuevoMensajeTitulo;
private Label _asuntoL;
private TexField _asuntoTF;
private Label _mensajeL;
private TextView _mensajeTV;
private Button _cancelar;
private Button _enviarMensaje;
// Back-reference to the registered user's mail screen.
public Correo__Usuario_registrado_ _correo__Usuario_registrado_;
// Sends the composed message. Not implemented yet.
public void Enviar_mensaje() {
throw new UnsupportedOperationException();
}
// Cancels composition. Not implemented yet.
public void cancelar() {
throw new UnsupportedOperationException();
}
}
| 24.190476
| 65
| 0.812992
|
ef1303674f8735b49b39396c313eef9c8e6b59b4
| 7,470
|
package org.broadinstitute.hellbender.tools.copynumber.models;
import htsjdk.samtools.SAMSequenceDictionary;
import htsjdk.samtools.SAMSequenceRecord;
import org.apache.commons.math3.random.RandomGenerator;
import org.apache.commons.math3.random.RandomGeneratorFactory;
import org.broadinstitute.hellbender.GATKBaseTest;
import org.broadinstitute.hellbender.tools.copynumber.formats.collections.ParameterDecileCollection;
import org.broadinstitute.hellbender.tools.copynumber.formats.metadata.SampleLocatableMetadata;
import org.broadinstitute.hellbender.tools.copynumber.formats.metadata.SimpleSampleLocatableMetadata;
import org.broadinstitute.hellbender.tools.copynumber.formats.records.ModeledSegment;
import org.broadinstitute.hellbender.utils.mcmc.Decile;
import org.broadinstitute.hellbender.utils.mcmc.DecileCollection;
import org.testng.Assert;
import org.testng.annotations.Test;
import java.util.List;
import java.util.Random;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
/**
* Tests the MCMC inference performed by {@link AlleleFractionModeller}. Only recovery of posterior centers is tested.
*
* @author David Benjamin <davidben@broadinstitute.org>
* @author Samuel Lee <slee@broadinstitute.org>
*/
public final class AlleleFractionModellerUnitTest extends GATKBaseTest {
private static final int RANDOM_SEED = 13;
// note: the following tolerance could actually be made much smaller if we used more segments and/or
// more hets -- most of the error is the sampling error of a finite simulated data set, not numerical error of MCMC
private static final double ABSOLUTE_TOLERANCE = 0.01;
// Simulates allelic-count data from known global parameters and segment minor
// fractions, fits the model by MCMC, and checks that posterior centers recover
// the simulation's ground truth within ABSOLUTE_TOLERANCE.
@Test
public void testMCMC() {
// Ground-truth global parameters for the simulation.
final double meanBias = 1.2;
final double biasVariance = 0.04;
final double outlierProbability = 0.02;
final AlleleFractionGlobalParameters globalParameters = new AlleleFractionGlobalParameters(meanBias, biasVariance, outlierProbability);
// alpha = 1 gives a flat prior on the minor-allele fraction.
final double minorAlleleFractionPriorAlpha = 1.;
final AlleleFractionPrior prior = new AlleleFractionPrior(minorAlleleFractionPriorAlpha);
final int numSegments = 50;
final double averageHetsPerSegment = 50.;
final double averageDepth = 50.;
// MCMC chain length and burn-in.
final int numSamples = 150;
final int numBurnIn = 50;
// Fixed seed keeps the simulated data and the test deterministic.
final RandomGenerator rng = RandomGeneratorFactory.createRandomGenerator(new Random(RANDOM_SEED));
// One 10kb contig per segment.
final SampleLocatableMetadata metadata = new SimpleSampleLocatableMetadata(
"test-sample",
new SAMSequenceDictionary(IntStream.range(0, numSegments)
.mapToObj(i -> new SAMSequenceRecord("chr" + i + 1, 10000))
.collect(Collectors.toList())));
final AlleleFractionSimulatedData simulatedData = new AlleleFractionSimulatedData(
metadata, globalParameters, numSegments, averageHetsPerSegment, averageDepth, rng);
final AlleleFractionModeller modeller = new AlleleFractionModeller(simulatedData.getData().getAllelicCounts(), simulatedData.getData().getSegments(), prior);
modeller.fitMCMC(numSamples, numBurnIn);
assertAlleleFractionPosteriorCenters(modeller, simulatedData);
}
// Asserts that posterior centers (both sample means and decile-50 summaries)
// match the simulation's true parameters within ABSOLUTE_TOLERANCE.
static void assertAlleleFractionPosteriorCenters(final AlleleFractionModeller modeller,
final AlleleFractionSimulatedData simulatedData) {
final AlleleFractionState trueState = simulatedData.getTrueState();
final int numSegments = simulatedData.getData().getNumSegments();
//check centers from samples
final List<Double> meanBiasSamples = modeller.getMeanBiasSamples();
final List<Double> biasVarianceSamples = modeller.getBiasVarianceSamples();
final List<Double> outlierProbabilitySamples = modeller.getOutlierProbabilitySamples();
final List<AlleleFractionState.MinorFractions> minorFractionsSamples = modeller.getMinorFractionsSamples();
Assert.assertEquals(numSegments, minorFractionsSamples.get(0).size());
// Transpose: per-sample lists of per-segment fractions -> per-segment lists of samples.
final List<List<Double>> minorFractionsSamplesBySegment = IntStream.range(0, numSegments)
.mapToObj(i -> minorFractionsSamples.stream().map(s -> s.get(i)).collect(Collectors.toList()))
.collect(Collectors.toList());
final double meanBiasResult = meanBiasSamples.stream().mapToDouble(x -> x).average().getAsDouble();
final double biasVarianceResult = biasVarianceSamples.stream().mapToDouble(x -> x).average().getAsDouble();
final double outlierProbabilityResult = outlierProbabilitySamples.stream().mapToDouble(x -> x).average().getAsDouble();
final List<Double> minorFractionsResult = minorFractionsSamplesBySegment
.stream().map(list -> list.stream().mapToDouble(x -> x).average().getAsDouble())
.collect(Collectors.toList());
// Mean absolute error across segments must be within tolerance.
final double totalSegmentError = IntStream.range(0, numSegments)
.mapToDouble(s -> Math.abs(minorFractionsResult.get(s) - trueState.segmentMinorFraction(s)))
.sum();
Assert.assertEquals(meanBiasResult, trueState.meanBias(), ABSOLUTE_TOLERANCE);
Assert.assertEquals(biasVarianceResult, trueState.biasVariance(), ABSOLUTE_TOLERANCE);
Assert.assertEquals(outlierProbabilityResult, trueState.outlierProbability(), ABSOLUTE_TOLERANCE);
Assert.assertEquals(totalSegmentError / numSegments, 0.0, ABSOLUTE_TOLERANCE);
//check centers from summaries
final ParameterDecileCollection<AlleleFractionParameter> globalParameterDeciles = modeller.getGlobalParameterDeciles();
final DecileCollection meanBiasDeciles = globalParameterDeciles.getDeciles(AlleleFractionParameter.MEAN_BIAS);
final double meanBiasPosteriorCenter = meanBiasDeciles.get(Decile.DECILE_50);
Assert.assertEquals(meanBiasPosteriorCenter, trueState.meanBias(), ABSOLUTE_TOLERANCE);
final DecileCollection biasVarianceDeciles = globalParameterDeciles.getDeciles(AlleleFractionParameter.BIAS_VARIANCE);
final double biasVariancePosteriorCenter = biasVarianceDeciles.get(Decile.DECILE_50);
Assert.assertEquals(biasVariancePosteriorCenter, trueState.biasVariance(), ABSOLUTE_TOLERANCE);
final DecileCollection outlierProbabilityDeciles = globalParameterDeciles.getDeciles(AlleleFractionParameter.OUTLIER_PROBABILITY);
final double outlierProbabilityPosteriorCenter = outlierProbabilityDeciles.get(Decile.DECILE_50);
Assert.assertEquals(outlierProbabilityPosteriorCenter, trueState.outlierProbability(), ABSOLUTE_TOLERANCE);
final List<ModeledSegment.SimplePosteriorSummary> minorFractionsPosteriorSummaries = modeller.getMinorAlleleFractionsPosteriorSummaries();
Assert.assertEquals(numSegments, minorFractionsPosteriorSummaries.size());
final List<Double> minorFractionsPosteriorCenters = minorFractionsPosteriorSummaries.stream().map(ModeledSegment.SimplePosteriorSummary::getDecile50).collect(Collectors.toList());
double totalPosteriorCentersSegmentError = 0.0;
for (int segment = 0; segment < numSegments; segment++) {
totalPosteriorCentersSegmentError += Math.abs(minorFractionsPosteriorCenters.get(segment) - trueState.segmentMinorFraction(segment));
}
Assert.assertEquals(totalPosteriorCentersSegmentError / numSegments, 0.0, ABSOLUTE_TOLERANCE);
}
}
| 62.25
| 187
| 0.757296
|
17cb0ddde0d3d8d14676477afb487ad035a0f2da
| 3,190
|
package org.firstinspires.ftc.teamcode.Fusion4133;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.SwitchableLight;
/**
* Created by Fusion on 11/8/2017.
*/
//this is our runnable teleop program that we use in competition
@Disabled
@TeleOp (name = "Hadron : TeleOp", group = "Hadron")
public class HadronTeleOp extends OpMode {
//this is how we can pull code from the hadron hardware setup
MecanumHardwareSetup hadron = new MecanumHardwareSetup();
boolean firstTime = true;
int liftHoldVal;
@Override
public void init() {
hadron.hardwareInit(hardwareMap);
//this turns off the color sensors light in TeleOp
if (hadron.color instanceof SwitchableLight) {
((SwitchableLight) hadron.color).enableLight(false);
}
//this just set the lift hold value to the position it is at at the beginning of the
liftHoldVal = hadron.liftDrive.getCurrentPosition();
}
@Override
//this is just so when we hit the stop button at any point in the program the robot will stop
public void stop() {
super.stop();
}
@Override
public void loop() {
double xVal;
double yVal;
double zVal;
double csInc = .04;
//because of rule changes that when in the Init phase we can't have moving parts, this makes it so the robot does not move until the play button has been touched.
if (firstTime){
hadron.servoInit(hardwareMap);
}
firstTime = false;
//these allows us to have a bit neater code
xVal = gamepad1.left_stick_x;
yVal = -gamepad1.left_stick_y;
zVal = gamepad1.right_stick_x;
hadron.leftFrontDrive.setPower(yVal + xVal + zVal); //Next four rows are just for strafing
hadron.leftBackDrive.setPower(yVal - xVal + zVal);
hadron.rightFrontDrive.setPower(yVal - xVal - zVal);
hadron.rightBackDrive.setPower(yVal + xVal - zVal);
hadron.collectionMotorLeft.setPower(gamepad2.right_trigger); //Next four lines for opening and closing the collection motors.
hadron.collectionMotorRight.setPower(gamepad2.right_trigger);
hadron.collectionMotorLeft.setPower(-gamepad2.left_trigger);
hadron.collectionMotorRight.setPower(-gamepad2.left_trigger);
//val .2 is there because we want to treat the joystick instead of analog to a button like device.
//liftHoldVal is what the position the lift was upon stopping the lift
if (Math.abs(gamepad2.left_stick_y) >= .2) {
hadron.liftDrive.setPower(gamepad2.left_stick_y);
liftHoldVal = hadron.liftDrive.getCurrentPosition();
}
//this if is so that when we let go of the joystick controlling the lift the lift does not lower itself.
else {
hadron.liftDrive.setPower((double) (liftHoldVal - hadron.liftDrive.getCurrentPosition()) / 2000.0);
}
hadron.relicArm.setPower(gamepad2.right_stick_y);
telemetry.addData("lift ENC ","value: %7d", hadron.liftDrive.getCurrentPosition());
telemetry.addData("lift power","val: %2f",gamepad2.left_stick_y);
//telemetry.addData("hold val", "Val: %2i", liftHoldVal);
}
}
| 37.093023
| 166
| 0.726646
|
a9e4233212fb5e43501471ca2d481e2edc1ffe0e
| 176
|
package android.app;
public class ActivityManagerNative {
public static IActivityManager mgr;
public static IActivityManager getDefault() {
return mgr;
}
}
| 12.571429
| 47
| 0.721591
|
83aee69b562e1d19301619c91d8f69b72b08a43f
| 36,352
|
/**
* Copyright (c) 2012, 2013, Huawei Technologies Co., Ltd.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.huawei.ipr.pof.manager.database;
import java.io.BufferedReader;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingQueue;
import org.openflow.protocol.OFCounter;
import org.openflow.protocol.OFCounter.OFCounterModCmd;
import org.openflow.protocol.OFFeaturesReply;
import org.openflow.protocol.OFFlowMod;
import org.openflow.protocol.OFGroupMod;
import org.openflow.protocol.OFMessage;
import org.openflow.protocol.OFMeterMod;
import org.openflow.protocol.OFPortStatus;
import org.openflow.protocol.OFType;
import org.openflow.protocol.action.OFAction;
import org.openflow.protocol.action.OFActionType;
import org.openflow.protocol.table.OFDataTable;
import org.openflow.protocol.table.OFFlowTable;
import org.openflow.protocol.table.OFFlowTableResource;
import org.openflow.protocol.table.OFTableType;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import com.huawei.ipr.pof.manager.IPMService;
/**
* SMSwitchDatabase is a part of SMDatabase, it stores data belonged to a switch.
* <br>
* SMSwitchDatabase stores:
* <ul>
* <li>Ports</li>
* <li>TableResource</li>
* <li>Flow Tables</li>
* <li>Flow Entries (stored in {@link PMFlowTableDatabase})</li>
* <li>Counters</li>
* <li>GroupMods</li>
* <li>MeterMods</li>
* <li>LoggedMessages</li>
* <li>SentMessage (for roll back)</li>
* </ul>
*
* @author Song Jian (jack.songjian@huawei.com), Huawei Technologies Co., Ltd.
*
*/
public class PMSwitchDatabase {
protected int deviceId;
protected OFFeaturesReply switchFeatures;
protected Map<Integer,OFPortStatus> portsMap;
protected OFFlowTableResource flowTableResource;
protected Map<Byte, OFFlowTable> flowTablesMap; //<globalTableId, table>
protected Map<Byte, PMFlowTableDatabase> flowTableDatabaseMap; //<globaltableId, SMFlowTableDatabase>
protected Map<OFTableType, Byte> flowTableNoBaseMap; //<tableType, NumberBase>
protected Map<OFTableType, Byte> flowTableNoMap; //<tableType, globalTableId>
protected Map<OFTableType, List<Byte>> freeFlowTableIDListMap;
protected OFDataTable<OFCounter> counterTable; //<id, counterEntry>
protected OFDataTable<OFGroupMod> groupTable; //<id, groupEntry>
protected OFDataTable<OFMeterMod> meterTable; //<id, meterEntry>
protected Queue<OFMessage> ofmInLogList;
protected Map<OFType, Queue<OFMessage>> ofmInLogMap;
protected Queue<OFMessage> ofmOutLogList;
protected Map<OFType, Queue<OFMessage>> ofmOutLogMap;
protected Queue<OFMessage> sendedOfmQueue;
protected Map<Integer, OFMessage> oldBackupOfmMap; //<sended_Ofm_xid, oldOFMessage>
public final static int QUEUE_LOG_SIZE_MAXIMAL = 100;
public final static int QUEUE_SENDED_SIZE_MAXIMAL = 20;
public PMSwitchDatabase(int switchId) {
deviceId = switchId;
portsMap = new ConcurrentHashMap<Integer,OFPortStatus>();
flowTablesMap = new ConcurrentHashMap<Byte, OFFlowTable>();
flowTableDatabaseMap = new ConcurrentHashMap<Byte, PMFlowTableDatabase>();
flowTableNoBaseMap = new ConcurrentHashMap<OFTableType, Byte>();
flowTableNoMap = new ConcurrentHashMap<OFTableType, Byte>();
freeFlowTableIDListMap = new ConcurrentHashMap<OFTableType, List<Byte>>();
counterTable = new OFDataTable<OFCounter>(IPMService.COUNTERID_START);
groupTable = new OFDataTable<OFGroupMod>(IPMService.GROUPID_START);
meterTable = new OFDataTable<OFMeterMod>(IPMService.COUNTERID_START);
ofmInLogList = new LinkedBlockingQueue<OFMessage>(QUEUE_LOG_SIZE_MAXIMAL);
ofmOutLogList = new LinkedBlockingQueue<OFMessage>(QUEUE_LOG_SIZE_MAXIMAL);
ofmInLogMap = new ConcurrentHashMap<OFType, Queue<OFMessage>>();
ofmOutLogMap = new ConcurrentHashMap<OFType, Queue<OFMessage>>();
sendedOfmQueue = new LinkedBlockingQueue<OFMessage>(QUEUE_SENDED_SIZE_MAXIMAL);
oldBackupOfmMap = new ConcurrentHashMap<Integer, OFMessage>();
}
public void iAddSendedOFMessage(OFMessage message){
if(sendedOfmQueue.size() >= QUEUE_LOG_SIZE_MAXIMAL){
OFMessage polledMsg = sendedOfmQueue.poll();
oldBackupOfmMap.remove( polledMsg.getXid() );
}
sendedOfmQueue.offer(message);
}
public Queue<OFMessage> iGetSendedOFMessageQueue(){
return sendedOfmQueue;
}
public OFMessage iGetSendedOFMessage(int xid){
for(OFMessage message : sendedOfmQueue){
if(message.getXid() == xid){
return message;
}
}
return null;
}
public void iDelSendedOFMessage(OFMessage message){
sendedOfmQueue.remove(message);
}
public void iAddOldBackupMessage(int sended_msg_xid, OFMessage message){
oldBackupOfmMap.put(sended_msg_xid, message);
}
public OFMessage iGetOldBackupMessage(int xid){
return oldBackupOfmMap.get(xid);
}
public void iDelOldBackupMessage(int xid){
oldBackupOfmMap.remove(xid);
}
public void iLogOFMessageIn(OFMessage message){
if(ofmInLogList.size() >= QUEUE_LOG_SIZE_MAXIMAL){
ofmInLogList.poll();
}
ofmInLogList.offer(message);
}
public void iLogOFMessageIn(OFType type, OFMessage message){
if(false == ofmInLogMap.containsKey(type)){
ofmInLogMap.put(type, new LinkedBlockingQueue<OFMessage>(QUEUE_LOG_SIZE_MAXIMAL));
}
Queue<OFMessage> queue = ofmInLogMap.get(type);
if(queue.size() >= QUEUE_LOG_SIZE_MAXIMAL){
queue.poll();
}
queue.offer(message);
}
public void iLogOFMessageOut(OFMessage message){
if(ofmOutLogList.size() >= QUEUE_LOG_SIZE_MAXIMAL){
ofmOutLogList.poll();
}
ofmOutLogList.offer(message);
}
public void iLogOFMessageOut(OFType type, OFMessage message){
if(false == ofmOutLogMap.containsKey(type)){
ofmOutLogMap.put(type, new LinkedBlockingQueue<OFMessage>(QUEUE_LOG_SIZE_MAXIMAL));
}
Queue<OFMessage> queue = ofmOutLogMap.get(type);
if(queue.size() >= QUEUE_LOG_SIZE_MAXIMAL){
queue.poll();
}
queue.offer(message);
}
public Queue<OFMessage> iGetLogOFMessageInAll(){
return ofmInLogList;
}
public Queue<OFMessage> iGetLogOFMessageIn(OFType type){
return ofmInLogMap.get(type);
}
public Queue<OFMessage> iGetLogOFMessageOutAll(){
return ofmOutLogList;
}
public Queue<OFMessage> iGetLogOFMessageOut(OFType type){
return ofmOutLogMap.get(type);
}
public OFPortStatus getPort(int portId){
return portsMap.get(portId);
}
public void putPort(int portId, OFPortStatus port){
portsMap.put(portId, port);
}
public OFFlowTable getFlowTable(byte globalTableId){
return flowTablesMap.get(globalTableId);
}
public void putFlowTable(byte globalTableId, OFFlowTable flowTable){
flowTablesMap.put(globalTableId, flowTable);
}
public void removeFlowTable(byte globalTableId){
flowTablesMap.remove(globalTableId);
}
public PMFlowTableDatabase getFlowTableDatabase(byte globalTableId){
return flowTableDatabaseMap.get(globalTableId);
}
public void putNewFlowTableDatabse(byte globalTableId){
flowTableDatabaseMap.put(globalTableId, new PMFlowTableDatabase(globalTableId));
}
public void removeFlowTableDatabse(byte globalTableId){
flowTableDatabaseMap.remove(globalTableId);
}
public int allocCounterId(){
int newCounterID = counterTable.alloc();
OFCounter newCounter = new OFCounter();
newCounter.setCommand(OFCounterModCmd.OFPCC_ADD);
putCounter(newCounterID, newCounter);
return newCounterID;
}
public OFCounter getCounter(int counterId){
return counterTable.get(counterId);
}
public void putCounter(int counterId, OFCounter counter){
counterTable.put(counterId, counter);
}
public OFCounter removeCounter(int counterId){
return counterTable.remove(counterId);
}
public int allocGroupId(){
return groupTable.alloc();
}
public OFGroupMod getGroup(int groupId){
return groupTable.get(groupId);
}
public void putGroup(int groupId, OFGroupMod group){
groupTable.put(groupId, group);
}
public OFGroupMod removeGroup(int groupId){
return groupTable.remove(groupId);
}
public class GroupComparatpr<T> implements Comparator<OFGroupMod> {
public int compare(OFGroupMod group1, OFGroupMod group2) {
if(group1 == null || group2 == null){
return 0;
}
return group1.getGroupId() - group2.getGroupId();
}
}
GroupComparatpr<OFGroupMod> groupComp = new GroupComparatpr<OFGroupMod>();
public List<OFGroupMod> getAllGroupList(){
List<OFGroupMod> groupList = Collections.synchronizedList(new ArrayList<OFGroupMod>());
try{
Map<Integer, OFGroupMod> hashMap = groupTable.getAllData();
Iterator<Integer> iter = hashMap.keySet().iterator();
int index;
while(iter.hasNext()){
index = iter.next();
groupList.add(hashMap.get(index));
}
Collections.sort(groupList, groupComp);
}catch(Exception e){
e.printStackTrace();
}
return groupList;
}
public int allocMeterId(){
return meterTable.alloc();
}
public OFMeterMod getMeter(int meterId){
return meterTable.get(meterId);
}
public void putMeter(int meterId, OFMeterMod meter){
meterTable.put(meterId, meter);
}
public OFMeterMod removeMeter(int meterId){
return meterTable.remove(meterId);
}
public class MeterComparatpr<T> implements Comparator<OFMeterMod> {
public int compare(OFMeterMod meter1, OFMeterMod meter2) {
if(meter1 == null || meter2 == null){
return 0;
}
return meter1.getMeterId() - meter2.getMeterId();
}
}
MeterComparatpr<OFMeterMod> meterComp = new MeterComparatpr<OFMeterMod>();
public List<OFMeterMod> getAllMeterList(){
List<OFMeterMod> meterList = Collections.synchronizedList(new ArrayList<OFMeterMod>());
try{
Map<Integer, OFMeterMod> hashMap = meterTable.getAllData();
Iterator<Integer> iter = hashMap.keySet().iterator();
int index;
while(iter.hasNext()){
index = iter.next();
meterList.add(hashMap.get(index));
}
Collections.sort(meterList, meterComp);
}catch(Exception e){
e.printStackTrace();
}
return meterList;
}
public int getDeviceId() {
return deviceId;
}
public void setDeviceId(int deviceId) {
this.deviceId = deviceId;
}
public OFFeaturesReply getSwitchFeatures() {
return switchFeatures;
}
public void setSwitchFeatures(OFFeaturesReply switchFeatures) {
this.switchFeatures = switchFeatures;
}
public Map<Integer, OFPortStatus> getPortsMap() {
return portsMap;
}
public OFFlowTableResource getFlowTableResource() {
return flowTableResource;
}
public void setFlowTableResource(OFFlowTableResource flowTableResource) {
if(flowTableResource == null){
return;
}
this.flowTableResource = flowTableResource;
counterTable.setMaxNumber(flowTableResource.getCounterNum());
groupTable.setMaxNumber(flowTableResource.getGroupNum());
meterTable.setMaxNumber(flowTableResource.getMeterNum());
}
public Map<Byte, OFFlowTable> getFlowTablesMap() {
return flowTablesMap;
}
public Map<Integer, OFFlowMod> getFlowEntriesMap(byte globalTableId) {
if(null == flowTableDatabaseMap.get(globalTableId)){
return null;
}
return flowTableDatabaseMap.get(globalTableId).getFlowEntriesMap();
}
public byte getNewFlowTableID(byte tableType) {
byte newFlowTableID = IPMService.FLOWTABLEID_INVALID;
try{
OFTableType ofTableType = OFTableType.values()[tableType];
if(null == freeFlowTableIDListMap
|| null == freeFlowTableIDListMap.get(ofTableType)
|| 0 == freeFlowTableIDListMap.get(ofTableType).size()){
newFlowTableID = this.flowTableNoMap.get(ofTableType);
this.setFlowTableNo(tableType, (byte) (newFlowTableID + 1));
}else{
newFlowTableID = freeFlowTableIDListMap.get(ofTableType).remove(0);
}
}catch(Exception e){
e.printStackTrace();
}
return newFlowTableID;
}
public void setFlowTableNo(byte tableType, byte flowTableNo) {
this.flowTableNoMap.put(OFTableType.values()[tableType], flowTableNo);
}
public byte getFlowTableNoBase(byte tableType) {
try{
return this.flowTableNoBaseMap.get(OFTableType.values()[tableType]);
}catch(Exception e){
e.printStackTrace();
return IPMService.FLOWTABLEID_INVALID;
}
}
public void setFlowTableNoBase(byte tableType, byte flowTableNoBase) {
this.flowTableNoBaseMap.put(OFTableType.values()[tableType], flowTableNoBase);
this.freeFlowTableIDListMap.put(OFTableType.values()[tableType], Collections.synchronizedList(new ArrayList<Byte>()));
}
public class ByteComparatpr<T> implements Comparator<Byte> {
public int compare(Byte id1, Byte id2) {
return id1 - id2;
}
}
ByteComparatpr<Byte> byteComp = new ByteComparatpr<Byte>();
public void addFreeFlowTableID(byte tableType, byte flowTableID){
List<Byte> freeIdList = freeFlowTableIDListMap.get(OFTableType.values()[tableType]);
freeIdList.add(flowTableID);
Collections.sort(freeIdList, byteComp);
}
public int getTableNumber(OFTableType tableType){
try{
return flowTableNoMap.get(tableType) - flowTableNoBaseMap.get(tableType) - freeFlowTableIDListMap.get(tableType).size();
}catch(Exception e){
e.printStackTrace();
return 0;
}
}
public int getAllTableNumber(){
return flowTablesMap.size();
}
public int getUsedCounterNumber(){
return counterTable.usedSize();
}
public int getUsedGroupNumber(){
return groupTable.usedSize();
}
public int getUsedMeterNumber(){
return meterTable.usedSize();
}
private boolean saveGroupTableIntoFile(OutputStream out){
Gson gson = new Gson();
String string;
java.lang.reflect.Type type;
try {
string = gson.toJson("#Group#", "#Group#".getClass());
out.write(string.getBytes());
out.write('\n');
type = new TypeToken<List<Integer>>(){}.getType();
string = gson.toJson(groupTable.getFreeIdList(), type);
out.write(string.getBytes());
out.write('\n');
string = gson.toJson(groupTable.getEntryIdNo(), int.class);
out.write(string.getBytes());
out.write('\n');
string = gson.toJson(groupTable.getMaxNumber(), int.class);
out.write(string.getBytes());
out.write('\n');
string = gson.toJson(groupTable.getStartNo(), int.class);
out.write(string.getBytes());
out.write('\n');
Iterator<Integer> groupTableItor = groupTable.getAllData().keySet().iterator();
Map<Integer, OFGroupMod> groupTableMap = groupTable.getAllData();
int groupId;
OFGroupMod ofGroup;
String groupIdFlagString;
while(groupTableItor.hasNext()){
groupId = groupTableItor.next();
ofGroup = groupTableMap.get(groupId);
groupIdFlagString = "#groupid#" + groupId;
out.write(groupIdFlagString.getBytes());
out.write('\n');
if(false == saveGroupIntoFile(out, ofGroup)){
return false;
}
}
}catch (Exception e) {
e.printStackTrace();
return false;
}
return true;
}
private boolean loadGroupTableFromFile(BufferedReader br, List<String> returnedCurLineString) {
Gson gson = new Gson();
String lineString;
java.lang.reflect.Type type;
try {
lineString = br.readLine();
if(!gson.fromJson(lineString, String.class).equals("#Group#")){
return false;
}
//read freeIdList
lineString = br.readLine();
type = new TypeToken<List<Integer>>(){}.getType();
List<Integer> freeIdList = gson.fromJson(lineString, type);
//read flowTableId
lineString = br.readLine();
int entryId = gson.fromJson(lineString, int.class);
//read maxNumber
lineString = br.readLine();
int maxNumber = gson.fromJson(lineString, int.class);
//read startNo
lineString = br.readLine();
int startNo = gson.fromJson(lineString, int.class);
int pos;
int groupId;
String typeString;
String actionString;
OFGroupMod ofGroup;
List<OFAction> actionList;
OFActionType actionType;
OFAction action;
Map<Integer, OFGroupMod> groupTableMap = new ConcurrentHashMap<Integer, OFGroupMod>();
lineString = br.readLine();
while(null != lineString && lineString.contains("#groupid#") ){
groupId = (int)Long.parseLong( lineString.substring("#groupid#".length()));
ofGroup = new OFGroupMod();
ofGroup.setLengthU(OFGroupMod.MAXIMAL_LENGTH);
//read group command
lineString = br.readLine();
ofGroup.setCommand( gson.fromJson(lineString, byte.class) );
//read group type
lineString = br.readLine();
ofGroup.setGroupType( gson.fromJson(lineString, byte.class) );
//read group id
lineString = br.readLine();
ofGroup.setGroupId( gson.fromJson(lineString, int.class) );
//read group counter id
lineString = br.readLine();
ofGroup.setCounterId( gson.fromJson(lineString, int.class) );
//read ofGroup ActionList
lineString = br.readLine();
actionList = Collections.synchronizedList(new ArrayList<OFAction>());
while (null != lineString && lineString.contains("#action.")) {
pos = lineString.indexOf("#", "#action.".length());
typeString = lineString.substring("#action.".length(), pos);
actionString = lineString.substring(pos + 1);
actionType = OFActionType.valueOf(typeString);
action = gson.fromJson(actionString, actionType.toClass());
actionList.add(action);
lineString = br.readLine();
}//while: read actions
//set action list
if (null == ofGroup.getActionList()) {
ofGroup.setActionList(actionList);
}
//set group action num
ofGroup.setActionNum((byte) (actionList.size()));
//put the groupMod to GroupTableMap
groupTableMap.put(groupId, ofGroup);
}//while: read GroupEntry
groupTable = new OFDataTable<OFGroupMod>(groupTableMap, freeIdList, entryId, maxNumber, startNo);
returnedCurLineString.add(0, lineString);
}catch (Exception e) {
e.printStackTrace();
return false;
}
returnedCurLineString.add(0, lineString);
return true;
}
private boolean saveGroupIntoFile(OutputStream out, OFGroupMod ofGroup) {
Gson gson = new Gson();
String string;
//java.lang.reflect.Type type;
try {
string = gson.toJson(ofGroup.getCommand(), byte.class);
out.write(string.getBytes());
out.write('\n');
string = gson.toJson(ofGroup.getGroupType(), byte.class);
out.write(string.getBytes());
out.write('\n');
string = gson.toJson(ofGroup.getGroupId(), int.class);
out.write(string.getBytes());
out.write('\n');
string = gson.toJson(ofGroup.getCounterId(), int.class);
out.write(string.getBytes());
out.write('\n');
for(OFAction action : ofGroup.getActionList()){
string = "#action." + action.getType().toString() + "#" + gson.toJson(action, action.getType().toClass());
out.write(string.getBytes());
out.write('\n');
}
}catch (Exception e) {
e.printStackTrace();
return false;
}
return true;
}
public boolean saveAllDataIntoFile(OutputStream out){
Gson gson = new Gson();
String string;
java.lang.reflect.Type type;
try {
// save deviceId
string = gson.toJson(deviceId, int.class);
out.write(string.getBytes());
out.write('\n');
//save freeFlowTableIDListMap
type = new TypeToken<ConcurrentHashMap<OFTableType, List<Byte>>>(){}.getType();
string = gson.toJson(freeFlowTableIDListMap, type);
out.write(string.getBytes());
out.write('\n');
//save counterTable
type = new TypeToken<OFDataTable<OFCounter>>(){}.getType();
string = gson.toJson(counterTable, type);
out.write(string.getBytes());
out.write('\n');
//save groupTable
// type = new TypeToken<OFDataTable<OFGroupMod>>(){}.getType();
// string = gson.toJson(groupTable, type);
// out.write(string.getBytes());
// out.write('\n');
//save group
if(false == saveGroupTableIntoFile(out)){
return false;
}
//save meterTable
type = new TypeToken<OFDataTable<OFMeterMod>>(){}.getType();
string = gson.toJson(meterTable, type);
out.write(string.getBytes());
out.write('\n');
//save flowTablesMap
type = new TypeToken<ConcurrentHashMap<Byte, OFFlowTable>>(){}.getType();
string = gson.toJson(flowTablesMap, type);
out.write(string.getBytes());
out.write('\n');
//save flowTableDatabaseMap
Iterator<Byte> flowTableDatabaseItor = flowTableDatabaseMap.keySet().iterator();
byte tableId;
PMFlowTableDatabase flowTableDatabase;
String tableIdFlagString;
while(flowTableDatabaseItor.hasNext()){
tableId = flowTableDatabaseItor.next();
flowTableDatabase = flowTableDatabaseMap.get(tableId);
tableIdFlagString = "#tableid#" + tableId;
out.write(tableIdFlagString.getBytes());
out.write('\n');
if(false == flowTableDatabase.saveAllDataIntoFile(out)){
return false;
}
}
} catch (Exception e) {
e.printStackTrace();
return false;
}
return true;
}
public boolean loadAllDataFromFile(BufferedReader br, List<String> returnedCurLineString, boolean switchConnected){
Gson gson = new Gson();
String lineString;
java.lang.reflect.Type type;
try {
//read deviceId
lineString = br.readLine();
deviceId = (int)(long)gson.fromJson(lineString, long.class);
//read freeFlowTableIDListMap
lineString = br.readLine();
type = new TypeToken<ConcurrentHashMap<OFTableType, List<Byte>>>(){}.getType();
freeFlowTableIDListMap = gson.fromJson(lineString, type);
//read conterTable
lineString = br.readLine();
type = new TypeToken<OFDataTable<OFCounter>>(){}.getType();
counterTable = gson.fromJson(lineString, type);
//read groupTable
if(false == loadGroupTableFromFile(br, returnedCurLineString)){
return false;
}
//read meterTable
lineString = returnedCurLineString.get(0);
type = new TypeToken<OFDataTable<OFMeterMod>>(){}.getType();
meterTable = gson.fromJson(lineString, type);
//read flowTablesMap
lineString = br.readLine();
type = new TypeToken<ConcurrentHashMap<Byte, OFFlowTable>>(){}.getType();
flowTablesMap = gson.fromJson(lineString, type);
//reset logs
ofmInLogList.clear();
ofmOutLogList.clear();
ofmInLogMap.clear();
ofmOutLogMap.clear();
sendedOfmQueue.clear();
oldBackupOfmMap.clear();
//save flowTableDatabaseMap
if(null == flowTableDatabaseMap){
flowTableDatabaseMap = new ConcurrentHashMap<Byte, PMFlowTableDatabase>();
}else{
flowTableDatabaseMap.clear();
}
byte tableId;
lineString = br.readLine();
PMFlowTableDatabase flowTableDatabase;
while( null != lineString && lineString.contains("#tableid#") ){
tableId = Byte.parseByte( lineString.substring("#tableid#".length()));
flowTableDatabase = flowTableDatabaseMap.get(tableId);
if(null == flowTableDatabase){
flowTableDatabase = new PMFlowTableDatabase(tableId);
flowTableDatabaseMap.put(tableId, flowTableDatabase);
}
if(false == flowTableDatabase.loadAllDataFromFile(tableId, br, returnedCurLineString)){
return false;
}
lineString = returnedCurLineString.get(0);
}
if(switchConnected){
int[] tableNum = new int[OFTableType.OF_MAX_TABLE_TYPE.getValue()];
Iterator<Byte> tableIdIter = flowTablesMap.keySet().iterator();
byte tableID;
OFFlowTable ofTable;
while(tableIdIter.hasNext()){
tableID = tableIdIter.next();
ofTable = flowTablesMap.get(tableID);
tableNum[ofTable.getTableType().ordinal()]++;
}
for(byte tableType = 0; tableType < OFTableType.OF_MAX_TABLE_TYPE.getValue(); tableType++){
flowTableNoMap.put(OFTableType.values()[tableType], (byte)(flowTableNoBaseMap.get(OFTableType.values()[tableType]) + tableNum[tableType]) );
}
}
}catch (Exception e) {
e.printStackTrace();
return false;
}
returnedCurLineString.add(0, lineString);
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((counterTable == null) ? 0 : counterTable.hashCode());
result = prime * result + deviceId;
result = prime * result + ((flowTableDatabaseMap == null) ? 0 : flowTableDatabaseMap.hashCode());
result = prime * result + ((flowTableNoBaseMap == null) ? 0 : flowTableNoBaseMap.hashCode());
result = prime * result + ((flowTableNoMap == null) ? 0 : flowTableNoMap.hashCode());
result = prime * result + ((flowTableResource == null) ? 0 : flowTableResource.hashCode());
result = prime * result + ((flowTablesMap == null) ? 0 : flowTablesMap.hashCode());
result = prime * result + ((freeFlowTableIDListMap == null) ? 0 : freeFlowTableIDListMap.hashCode());
result = prime * result + ((groupTable == null) ? 0 : groupTable.hashCode());
result = prime * result + ((meterTable == null) ? 0 : meterTable.hashCode());
result = prime * result + ((ofmInLogList == null) ? 0 : ofmInLogList.hashCode());
result = prime * result + ((ofmInLogMap == null) ? 0 : ofmInLogMap.hashCode());
result = prime * result + ((ofmOutLogList == null) ? 0 : ofmOutLogList.hashCode());
result = prime * result + ((ofmOutLogMap == null) ? 0 : ofmOutLogMap.hashCode());
result = prime * result + ((oldBackupOfmMap == null) ? 0 : oldBackupOfmMap.hashCode());
result = prime * result + ((portsMap == null) ? 0 : portsMap.hashCode());
result = prime * result + ((sendedOfmQueue == null) ? 0 : sendedOfmQueue.hashCode());
result = prime * result + ((switchFeatures == null) ? 0 : switchFeatures.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
PMSwitchDatabase other = (PMSwitchDatabase) obj;
if (counterTable == null) {
if (other.counterTable != null)
return false;
} else if (!counterTable.equals(other.counterTable))
return false;
if (deviceId != other.deviceId)
return false;
if (flowTableDatabaseMap == null) {
if (other.flowTableDatabaseMap != null)
return false;
} else if (!flowTableDatabaseMap.equals(other.flowTableDatabaseMap))
return false;
if (flowTableNoBaseMap == null) {
if (other.flowTableNoBaseMap != null)
return false;
} else if (!flowTableNoBaseMap.equals(other.flowTableNoBaseMap))
return false;
if (flowTableNoMap == null) {
if (other.flowTableNoMap != null)
return false;
} else if (!flowTableNoMap.equals(other.flowTableNoMap))
return false;
if (flowTableResource == null) {
if (other.flowTableResource != null)
return false;
} else if (!flowTableResource.equals(other.flowTableResource))
return false;
if (flowTablesMap == null) {
if (other.flowTablesMap != null)
return false;
} else if (!flowTablesMap.equals(other.flowTablesMap))
return false;
if (freeFlowTableIDListMap == null) {
if (other.freeFlowTableIDListMap != null)
return false;
} else if (!freeFlowTableIDListMap.equals(other.freeFlowTableIDListMap))
return false;
if (groupTable == null) {
if (other.groupTable != null)
return false;
} else if (!groupTable.equals(other.groupTable))
return false;
if (meterTable == null) {
if (other.meterTable != null)
return false;
} else if (!meterTable.equals(other.meterTable))
return false;
if (ofmInLogList == null) {
if (other.ofmInLogList != null)
return false;
} else if (!ofmInLogList.equals(other.ofmInLogList))
return false;
if (ofmInLogMap == null) {
if (other.ofmInLogMap != null)
return false;
} else if (!ofmInLogMap.equals(other.ofmInLogMap))
return false;
if (ofmOutLogList == null) {
if (other.ofmOutLogList != null)
return false;
} else if (!ofmOutLogList.equals(other.ofmOutLogList))
return false;
if (ofmOutLogMap == null) {
if (other.ofmOutLogMap != null)
return false;
} else if (!ofmOutLogMap.equals(other.ofmOutLogMap))
return false;
if (oldBackupOfmMap == null) {
if (other.oldBackupOfmMap != null)
return false;
} else if (!oldBackupOfmMap.equals(other.oldBackupOfmMap))
return false;
if (portsMap == null) {
if (other.portsMap != null)
return false;
} else if (!portsMap.equals(other.portsMap))
return false;
if (sendedOfmQueue == null) {
if (other.sendedOfmQueue != null)
return false;
} else if (!sendedOfmQueue.equals(other.sendedOfmQueue))
return false;
if (switchFeatures == null) {
if (other.switchFeatures != null)
return false;
} else if (!switchFeatures.equals(other.switchFeatures))
return false;
return true;
}
}
| 36.8308
| 155
| 0.587285
|
2c6912777f5bf290a6fde26797bd58293fdc0704
| 1,120
|
/*
* Copyright (C) 2018-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package jdocs.circuitbreaker;
import akka.actor.AbstractActor;
import akka.pattern.CircuitBreaker;
import java.time.Duration;
import java.util.Optional;
import java.util.function.BiFunction;
/**
 * Documentation sample showing how a circuit breaker can treat a
 * *successful* result as a failure: a custom predicate classifies any even
 * return value as a failure for the breaker's accounting.
 *
 * NOTE: the {@code //#even-no-as-failure} markers delimit the snippet that is
 * extracted into the docs — do not move or remove them.
 */
public class EvenNoFailureJavaExample extends AbstractActor {
//#even-no-as-failure
// CircuitBreaker(executor, scheduler, maxFailures, callTimeout, resetTimeout):
// opens after 5 failures; a call over 10s counts as a failure; stays open
// for 1 minute before probing again.
private final CircuitBreaker breaker;
public EvenNoFailureJavaExample() {
this.breaker = new CircuitBreaker(
getContext().getDispatcher(), getContext().getSystem().getScheduler(),
5, Duration.ofSeconds(10), Duration.ofMinutes(1));
}
/**
 * Runs a call through the breaker with a custom failure predicate.
 *
 * @return 8888 — an even number, so the breaker also records a failure
 */
public int luckyNumber() {
// Predicate over (result, exception): returning true means "count this
// call as a failure". Here even results are deliberately failures.
BiFunction<Optional<Integer>, Optional<Throwable>, Boolean> evenNoAsFailure =
(result, err) -> (result.isPresent() && result.get() % 2 == 0);
// this will return 8888 and increase failure count at the same time
return this.breaker.callWithSyncCircuitBreaker(() -> 8888, evenNoAsFailure);
}
//#even-no-as-failure
@Override
public Receive createReceive() {
// This sample actor handles no messages; only the breaker demo matters.
return null;
}
}
| 29.473684
| 86
| 0.676786
|
3f9e1258536da5955ed6a846fbe25ce074547625
| 7,171
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.lens.cube.metadata.timeline;
import static org.testng.Assert.*;
import java.lang.reflect.InvocationTargetException;
import java.util.*;
import org.apache.lens.cube.metadata.TestTimePartition;
import org.apache.lens.cube.metadata.TimePartition;
import org.apache.lens.cube.metadata.UpdatePeriod;
import org.apache.lens.cube.metadata.UpdatePeriodTest;
import org.apache.lens.server.api.error.LensException;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class TestPartitionTimelines {
  private static final String TABLE_NAME = "storage_fact";
  private static final String PART_COL = "pt";
  // Every implementation is expected to behave identically; each test is run
  // against all of them.
  private static final List<Class<? extends PartitionTimeline>> TIMELINE_IMPLEMENTATIONS = Arrays.asList(
    StoreAllPartitionTimeline.class,
    EndsAndHolesPartitionTimeline.class,
    RangesPartitionTimeline.class
  );

  @DataProvider(name = "update-periods")
  public Object[][] provideUpdatePeriods() {
    return UpdatePeriodTest.provideUpdatePeriods();
  }

  /** Cross product of every update period with every timeline implementation. */
  @DataProvider(name = "update-periods-and-timeline-classes")
  public Object[][] provideUpdatePeriodsAndTimelineClasses() {
    UpdatePeriod[] values = UpdatePeriod.values();
    Object[][] ret = new Object[values.length * TIMELINE_IMPLEMENTATIONS.size()][2];
    for (int i = 0; i < values.length; i++) {
      for (int j = 0; j < TIMELINE_IMPLEMENTATIONS.size(); j++) {
        ret[TIMELINE_IMPLEMENTATIONS.size() * i + j] = new Object[]{
          values[i],
          TIMELINE_IMPLEMENTATIONS.get(j),
        };
      }
    }
    return ret;
  }

  /**
   * Adds random partitions to one timeline of each implementation, checking
   * after every mutation that all implementations agree, then drops all
   * partitions and checks the timelines end up empty and consistent.
   */
  @Test(dataProvider = "update-periods")
  public void testEquivalence(UpdatePeriod period) throws LensException, InvocationTargetException,
    NoSuchMethodException, InstantiationException, IllegalAccessException {
    final Random randomGenerator = new Random();
    for (int j = 0; j < 10; j++) {
      List<PartitionTimeline> timelines = Lists.newArrayList();
      for (Class<? extends PartitionTimeline> clazz : TIMELINE_IMPLEMENTATIONS) {
        timelines.add(getInstance(clazz, period));
      }
      final List<TimePartition> addedPartitions = Lists.newArrayList();
      for (int i = 0; i < 20; i++) {
        // Random offsets in [-5, 4] deliberately produce duplicate adds.
        int randomInt = randomGenerator.nextInt(10) - 5;
        TimePartition part = TimePartition.of(period, TestTimePartition.timeAtDiff(TestTimePartition.NOW, period,
          randomInt));
        addedPartitions.add(part);
        for (PartitionTimeline timeline : timelines) {
          timeline.add(part);
        }
        assertSameTimelines(timelines);
      }
      assertSameTimelines(timelines);
      Collections.shuffle(addedPartitions);
      Iterator<TimePartition> iter = addedPartitions.iterator();
      while (iter.hasNext()) {
        TimePartition part = iter.next();
        iter.remove();
        // Only drop once the last duplicate of this partition is being removed.
        if (!addedPartitions.contains(part)) {
          for (PartitionTimeline timeline : timelines) {
            timeline.drop(part);
            assertTrue(timeline.isConsistent());
          }
        }
      }
      for (PartitionTimeline timeline : timelines) {
        assertTrue(timeline.isEmpty());
      }
    }
  }

  /**
   * Asserts that all given timelines iterate over exactly the same sequence
   * of partitions.
   *
   * <p>Fixed: the previous version stored each step's values in a raw
   * {@code Map<Class, TimePartition>} keyed by {@code iterator.getClass()},
   * which silently collapsed values whenever two timeline implementations
   * used iterators of the same runtime class, weakening the check. Collecting
   * into a {@code Set} keeps one entry per distinct value regardless of which
   * iterator produced it.</p>
   */
  public static void assertSameTimelines(List<PartitionTimeline> timelines) {
    List<Iterator<TimePartition>> iterators = Lists.newArrayList();
    for (PartitionTimeline timeline : timelines) {
      iterators.add(timeline.iterator());
    }
    while (iterators.get(0).hasNext()) {
      Set<TimePartition> parts = new HashSet<>();
      for (Iterator<TimePartition> iterator : iterators) {
        assertTrue(iterator.hasNext());
        parts.add(iterator.next());
      }
      assertEquals(parts.size(), 1, "More than one values for next: " + parts);
    }
    // All iterators must be exhausted together.
    for (Iterator<TimePartition> iterator : iterators) {
      assertFalse(iterator.hasNext());
    }
  }

  /** Reflectively constructs a timeline of the given class for the given period. */
  private <T extends PartitionTimeline> T getInstance(Class<T> clz, UpdatePeriod period) throws
    NoSuchMethodException, IllegalAccessException, InvocationTargetException, InstantiationException {
    return clz.getConstructor(String.class, UpdatePeriod.class, String.class)
      .newInstance(TABLE_NAME, period, PART_COL);
  }

  /**
   * Verifies the toProperties()/initFromProperties() round-trip contract for
   * each implementation: init from props empties the timeline, round-trips
   * preserve equality, and a sparse 500-partition range survives intact.
   */
  @Test(dataProvider = "update-periods-and-timeline-classes")
  public <T extends PartitionTimeline> void testPropertiesContract(UpdatePeriod period, Class<T> clz) throws
    LensException, InvocationTargetException, NoSuchMethodException, InstantiationException, IllegalAccessException {
    // Make two instances, one to modify, other to validate against
    T inst1 = getInstance(clz, period);
    T inst2 = getInstance(clz, period);
    // whenever we'll init from props, timeline should become empty.
    Map<String, String> props = inst1.toProperties();
    assertTrue(inst2.initFromProperties(props));
    // init from props of an empty timeline: should succeed and make the timeline empty
    assertEquals(inst1, inst2);
    assertTrue(inst1.isEmpty());
    assertTrue(inst2.isEmpty());
    // Add single partition and test for non-equivalence
    assertTrue(inst1.add(TimePartition.of(period, TestTimePartition.NOW)));
    assertFalse(inst1.equals(inst2));
    // add same partition in other timeline, test for equality
    assertTrue(inst2.add(TimePartition.of(period, TestTimePartition.NOW)));
    assertTrue(inst1.isConsistent());
    assertTrue(inst2.isConsistent());
    assertEquals(inst1, inst2);
    // init with blank properties. Should become empty
    assertTrue(inst2.initFromProperties(props));
    assertFalse(inst1.equals(inst2));
    // init from properties of timeline with single partition.
    assertTrue(inst2.initFromProperties(inst1.toProperties()));
    assertEquals(inst1, inst2);
    // clear timelines
    inst1.initFromProperties(props);
    inst2.initFromProperties(props);
    // Make sparse partition range in one, init other from its properties. Test equality.
    for (int i = 0; i < 500; i++) {
      assertTrue(inst1.add(TimePartition.of(period, TestTimePartition.timeAtDiff(TestTimePartition.NOW, period,
        i * 2))));
    }
    assertTrue(inst1.isConsistent());
    inst2.initFromProperties(inst1.toProperties());
    assertTrue(inst2.isConsistent());
    assertEquals(inst1, inst2);
  }
}
| 40.514124
| 117
| 0.713011
|
2a26f94b98c0b2ed6c2d0f0802e91661011d0b2c
| 768
|
package com.tutorial;
/**
 * Tutorial program demonstrating Java's compound assignment operators
 * (+=, -=, *=, /=, %=) by printing the result of each one.
 */
public class Main {
    public static void main(String[] args) {
        // Addition assignment: shorthand for sum = sum + 10
        int sum = 1;
        sum += 10;
        System.out.println("nilai a = " + sum);

        // Subtraction assignment
        int difference = 100;
        difference -= 25;
        System.out.println("nilai b = " + difference);

        // Multiplication assignment
        int product = 100;
        product *= 20;
        System.out.println("nilai c = " + product);

        // Division assignment (integer division)
        int quotient = 100;
        quotient /= 20;
        System.out.println("nilai d = " + quotient);

        // Modulus assignment (remainder after division)
        int remainder = 10;
        remainder %= 7;
        System.out.println("nilai e = " + remainder);
    }
}
| 20.756757
| 45
| 0.492188
|
ce0267d9a17ab79aa8eecef5fd9748938efcdecd
| 767
|
package com.testfairy.flutterexample;
import android.content.Context;
import android.os.Bundle;
import android.view.View;
import androidx.annotation.Nullable;
import io.flutter.embedding.android.FlutterActivity;
import io.flutter.embedding.android.SplashScreen;
/**
 * Host activity for the Flutter example app. Provides a minimal splash
 * screen — a bare, undecorated View — displayed until Flutter takes over.
 */
public class MainActivity extends FlutterActivity {
    /** Splash screen backed by an empty {@link View}, with no transition animation. */
    public static class MySplashScreen implements SplashScreen {
        @Nullable
        @Override
        public View createSplashView(Context context, @Nullable Bundle savedInstanceState) {
            // A plain empty View is enough as a placeholder; no styling applied.
            return new View(context);
        }

        @Override
        public void transitionToFlutter(Runnable onTransitionComplete) {
            // Nothing to animate — hand control to Flutter immediately.
            onTransitionComplete.run();
        }
    }

    @Nullable
    @Override
    public SplashScreen provideSplashScreen() {
        return new MySplashScreen();
    }
}
| 23.242424
| 86
| 0.787484
|
278ed31838d617a8bc7d0c6815826b909d3b0d74
| 2,788
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package net.gcolin.rest;
import java.io.InputStream;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import javax.ws.rs.ext.MessageBodyReader;
import javax.ws.rs.ext.MessageBodyWriter;
/**
 * An internal data structure.
 *
 * <p>Holds per-invocation state: lazily allocated request properties plus the
 * negotiated consume/produce media types, the selected message body
 * reader/writer and the request entity stream.</p>
 *
 * @author Gaël COLIN
 * @since 1.0
 */
public class InvocationContext {

    private Map<String, Object> attributes;
    private FastMediaType consume;
    private FastMediaType produce;
    private MessageBodyReader<Object> reader;
    private MessageBodyWriter<Object> writer;
    private InputStream entityStream;

    public InvocationContext() {}

    public InvocationContext(Map<String, Object> attributes) {
        this.attributes = attributes;
    }

    /** Returns the property map, allocating it on first use. */
    private Map<String, Object> getAttributes() {
        Map<String, Object> map = attributes;
        if (map == null) {
            map = new HashMap<>();
            attributes = map;
        }
        return map;
    }

    /** Looks up a single property; null when absent. */
    public Object getProperty(String name) {
        return getAttributes().get(name);
    }

    /** Names of all currently set properties. */
    public Collection<String> getPropertyNames() {
        return getAttributes().keySet();
    }

    /** Sets (or overwrites) a property. */
    public void setProperty(String name, Object object) {
        getAttributes().put(name, object);
    }

    /** Removes a property; no-op when absent. */
    public void removeProperty(String name) {
        getAttributes().remove(name);
    }

    public FastMediaType getConsume() {
        return consume;
    }

    public void setConsume(FastMediaType consume) {
        this.consume = consume;
    }

    public FastMediaType getProduce() {
        return produce;
    }

    public void setProduce(FastMediaType produce) {
        this.produce = produce;
    }

    public MessageBodyReader<Object> getReader() {
        return reader;
    }

    public void setReader(MessageBodyReader<Object> reader) {
        this.reader = reader;
    }

    public MessageBodyWriter<Object> getWriter() {
        return writer;
    }

    public void setWriter(MessageBodyWriter<Object> writer) {
        this.writer = writer;
    }

    public InputStream getEntityStream() {
        return entityStream;
    }

    public void setEntityStream(InputStream entityStream) {
        this.entityStream = entityStream;
    }
}
| 25.345455
| 100
| 0.727044
|
49c37325ffc82febe2ec3919d56422a3f95c6f28
| 2,751
|
package com.tchappy.photo.util;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
/**
 * Container for the data-transfer objects exchanged with the server:
 * query records, paged query responses, notice entries and notice responses.
 * Field names mirror the server's JSON keys (hence the snake_case).
 */
public class INFO {
/** A single business-query record. */
public static class Query_Info implements Serializable {
// user name
private String uname;
// user phone number
private String uphone;
// time the record was added
private String add_time;
// business type code
private String yw_type;
public String getUname() {
return uname;
}
public void setUname(String uname) {
this.uname = uname;
}
public String getUphone() {
return uphone;
}
public void setUphone(String uphone) {
this.uphone = uphone;
}
public String getAdd_time() {
return add_time;
}
public void setAdd_time(String add_time) {
this.add_time = add_time;
}
public String getYw_type() {
return yw_type;
}
public void setYw_type(String yw_type) {
this.yw_type = yw_type;
}
}
/** Paged server response wrapping a list of {@link Query_Info} records. */
public static class Server_Info implements Serializable {
// human-readable response message
private String info;
// response status code/flag
private String status;
// total number of matching records
private int total;
// page size used by the server
private int perPageSize;
// records for the current page
ArrayList<Query_Info> query_info;
public String getInfo() {
return info;
}
public void setInfo(String info) {
this.info = info;
}
public String getStatus() {
return status;
}
public void setStatus(String status) {
this.status = status;
}
public int getTotal() {
return total;
}
public void setTotal(int total) {
this.total = total;
}
public int getPerPageSize() {
return perPageSize;
}
public void setPerPageSize(int perPageSize) {
this.perPageSize = perPageSize;
}
public ArrayList<Query_Info> getQuery_info() {
return query_info;
}
public void setQuery_info(ArrayList<Query_Info> query_info) {
this.query_info = query_info;
}
}
/** A single notice/announcement entry. */
public static class Notice_Info implements Serializable {
private String title;
private String add_time;
/* Get the notice title. */
public String getTitle() {
return title;
}
/* Set the notice title. */
public void setTitle(String title) {
this.title = title;
}
/* Get the time the notice was added. */
public String getAdd_time() {
return add_time;
}
/* Set the time the notice was added. */
public void setAdd_time(String add_time) {
this.add_time = add_time;
}
}
/** Server response wrapping a list of {@link Notice_Info} entries. */
public static class Notice_Server implements Serializable {
private String info;
private String status;
ArrayList<Notice_Info> notice_info;
/* Get the response message. */
public String getInfo() {
return info;
}
/* Set the response message. */
public void setInfo(String info) {
this.info = info;
}
/* Get the response status. */
public String getStatus() {
return status;
}
/* Set the response status. */
public void setStatus(String status) {
this.status = status;
}
/* Get the notice list. */
public ArrayList<Notice_Info> getNotice_info() {
return notice_info;
}
/* Set the notice list. */
public void setNotice_info(ArrayList<Notice_Info> notice_info) {
this.notice_info = notice_info;
}
}
}
| 19.791367
| 66
| 0.685569
|
f493807cb29192c4436a65bc305aed794f6a9345
| 2,139
|
package hwswbuilder.structures;
import hwswbuilder.command.Workspace;
import java.util.Arrays;
import java.util.List;
/**
 * Implemented by model elements that can emit their own NuSMV declaration.
 */
public interface CodeProducer {
    /** Emits this element's NuSMV declaration into the workspace. */
    void nuSMVDeclaration(Workspace workspace);

    /**
     * Describes one input of a produced block: its NuSMV name and type,
     * whether a failure is injected on it, and whether it is a constant.
     */
    class InputInfo {
        final String argName;
        final String nusmvType;
        final boolean injectFailure;
        final boolean isConstant;

        private InputInfo(String argName, String nusmvType, boolean injectFailure, boolean isConstant) {
            this.argName = argName;
            this.nusmvType = nusmvType;
            this.injectFailure = injectFailure;
            this.isConstant = isConstant;
        }

        /** Non-constant input: the division index is folded into the name. */
        InputInfo(String argName, String nusmvType, int division, boolean injectFailure) {
            this(argName + "_DIV" + division, nusmvType, injectFailure, false);
        }

        /** Boolean constant input; never has failures injected. */
        InputInfo(String binConstValue) {
            this(binConstValue, "boolean", false, true);
        }

        /**
         * Name of the post-failure signal for this input.
         *
         * @param full when true, append the output-pin suffix ".OUT1"
         */
        String failureName(boolean full) {
            assert !isConstant;
            String name = argName + "_AFTER_FAILURE";
            if (full) {
                return name + ".OUT1";
            }
            return name;
        }

        /**
         * Failure declaration with custom argument name.
         * @param arg: argument name.
         * @return fault: module declaration.
         */
        List<String> failureDecl(String arg, Workspace workspace) {
            assert !isConstant;
            final String blockName;
            if (workspace.vanishingFailures) {
                blockName = "INJECT_VANISHING_FAILURE";
            } else {
                blockName = "INJECT_FAILURE";
            }
            String substitute = arg + "_SUBS: " + nusmvType;
            String injection = String.format(
                "%s: %s(%s, %s_FAULT, TRUE, %s_SUBS, FALSE, TRUE, FAILURE_VANISHED)",
                failureName(false), blockName, arg, arg, arg);
            return Arrays.asList(substitute, injection);
        }

        @Override
        public String toString() {
            return "InputInfo{"
                + "argName='" + argName + '\''
                + ", nusmvType='" + nusmvType + '\''
                + ", injectFailure=" + injectFailure
                + ", isConstant=" + isConstant
                + '}';
        }
    }
}
| 33.421875
| 104
| 0.557737
|
fa745beea3856787fc1b3357903c90cd3975edd6
| 3,211
|
/* Copyright 2002-2022 CS GROUP
* Licensed to CS GROUP (CS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* CS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.orekit.data;
import java.util.regex.Pattern;
/** Interface for providing data files to {@link DataLoader file loaders}.
 * <p>
 * This interface defines a generic way to explore some collection holding
 * data files and load some of them. The collection may be a list of resources
 * in the classpath, a directories tree in filesystem, a zip or jar archive,
 * a database, a connexion to a remote server ...
 * </p>
 * <p>
 * The proper way to use this interface is to configure one or more
 * implementations and register them in the {@link DataProvidersManager data
 * providers manager singleton}, or to let this manager use its default
 * configuration. Once registered, they will be used automatically whenever
 * some data needs to be loaded. This allow high level applications developers
 * to customize Orekit data loading mechanism and get a tighter integration of
 * the library within their application.
 * </p>
 * @see DataLoader
 * @see DataProvidersManager
 * @author Luc Maisonobe
 */
public interface DataProvider {
/** Pattern for name of zip/jar archives: matches names ending in ".zip" or
 * ".jar"; capture group 1 holds the base name without the extension. */
Pattern ZIP_ARCHIVE_PATTERN = Pattern.compile("(.*)(?:(?:\\.zip)|(?:\\.jar))$");
/** Feed a data file loader by browsing the data collection.
 * <p>
 * The method crawls all files referenced in the instance (for example
 * all files in a directories tree) and for each file supported by the
 * file loader it asks the file loader to load it.
 * </p>
 * <p>
 * If the method completes without exception, then the data loader
 * is considered to have been fed successfully and the top level
 * {@link DataProvidersManager data providers manager} will return
 * immediately without attempting to use the next configured providers.
 * </p>
 * <p>
 * If the method completes abruptly with an exception, then the top level
 * {@link DataProvidersManager data providers manager} will try to use
 * the next configured providers, in case another one can feed the
 * {@link DataLoader data loader}.
 * </p>
 *
 * @param supported pattern for file names supported by the visitor
 * @param visitor data file visitor to use
 * @param manager with the filters to apply to the resources.
 * @return true if some data has been loaded
 */
boolean feed(Pattern supported, DataLoader visitor, DataProvidersManager manager);
}
| 43.986301
| 86
| 0.721893
|
0b51c96f035f8a284b919828646ce127ad0f69e7
| 14,462
|
package com.pontusvision.gdpr;
import com.google.gson.Gson;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
/**
 * Unit tests for the translation of ag-Grid style filter models into index
 * search strings via {@code Resource.getIndexSearchStr(RecordRequest)}.
 *
 * <p>The original tests repeated the same request/filter wiring in every
 * method; the private builder helpers below remove that duplication while
 * keeping every expected search string byte-identical.</p>
 */
public class AppTest
    extends TestCase
{
  /**
   * Create the test case
   *
   * @param testName name of the test case
   */
  public AppTest( String testName )
  {
    super( testName );
  }
  /**
   * @return the suite of tests being tested
   */
  public static Test suite()
  {
    return new TestSuite( AppTest.class );
  }

  /** Builds a "Person.Natural" request holding the given filters (possibly none). */
  private static RecordRequest personRequest(PVGridFilters... filters) {
    RecordRequest req = new RecordRequest();
    req.dataType = "Person.Natural";
    req.filters = filters;
    return req;
  }

  /** Builds a simple text filter. */
  private static PVGridFilters textFilter(String colId, String type, String filter) {
    PVGridFilters f = new PVGridFilters();
    f.colId = colId;
    f.type = type;
    f.filter = filter;
    f.filterType = "text";
    return f;
  }

  /** Builds a simple date filter; dateTo is ignored by single-bound types. */
  private static PVGridFilters dateFilter(String colId, String type, String dateFrom, String dateTo) {
    PVGridFilters f = new PVGridFilters();
    f.colId = colId;
    f.type = type;
    f.dateFrom = dateFrom;
    f.dateTo = dateTo;
    f.filterType = "date";
    return f;
  }

  /** Builds a text condition for a composite (two-condition) filter. */
  private static PVGridFilterCondition textCondition(String type, String filter) {
    PVGridFilterCondition c = new PVGridFilterCondition();
    c.type = type;
    c.filter = filter;
    c.filterType = "text";
    return c;
  }

  /** Builds a date condition for a composite (two-condition) filter. */
  private static PVGridFilterCondition dateCondition(String type, String dateFrom, String dateTo) {
    PVGridFilterCondition c = new PVGridFilterCondition();
    c.type = type;
    c.dateFrom = dateFrom;
    c.dateTo = dateTo;
    c.filterType = "date";
    return c;
  }

  /**
   * Builds a composite filter joining two conditions with the given operator.
   *
   * @param filterType top-level filter type, or null to leave it unset
   *                   (the original tests only set it for date filters)
   */
  private static PVGridFilters compositeFilter(String colId, String operator, String filterType,
      PVGridFilterCondition condition1, PVGridFilterCondition condition2) {
    PVGridFilters f = new PVGridFilters();
    f.colId = colId;
    f.operator = operator;
    f.filterType = filterType;
    f.condition1 = condition1;
    f.condition2 = condition2;
    return f;
  }

  /** Runs the translation, echoes it (as the original tests did) and asserts. */
  private static void assertIndexSearch(RecordRequest req, String expected) {
    String idxSearch = Resource.getIndexSearchStr(req);
    System.out.println(idxSearch);
    assertEquals(expected, idxSearch);
  }

  public void testSimpleFilters()
  {
    assertIndexSearch(
        personRequest(
            textFilter("Person.Natural.Full_Name", "contains", "Leo"),
            textFilter("Person.Natural.Last_Name", "equals", "Martins")),
        "(v.\"Person.Natural.Full_Name\":*Leo*) AND (v.\"Person.Natural.Last_Name\":Martins)");
  }

  public void testSingleFilter()
  {
    assertIndexSearch(
        personRequest(textFilter("Person.Natural.Full_Name", "contains", "Leo")),
        "(v.\"Person.Natural.Full_Name\":*Leo*)");
  }

  public void testSingleDateFilterInRange()
  {
    assertIndexSearch(
        personRequest(dateFilter("Person.Natural.Date_Of_Birth", "inRange", "02-08-1972", "02-08-1992")),
        "(v.\"Person.Natural.Date_Of_Birth\":[ 02-08-1972 TO 02-08-1992 ])");
  }

  public void testSingleDateFilterNotEquals()
  {
    assertIndexSearch(
        personRequest(dateFilter("Person.Natural.Date_Of_Birth", "notEqual", "02-08-1972", "02-08-1992")),
        "(( (v.\"Person.Natural.Date_Of_Birth\":{ * TO 02-08-1972 } ) OR (v.\"Person.Natural.Date_Of_Birth\":{ 02-08-1972 TO * } ) ))");
  }

  public void testSingleDateFilterEquals()
  {
    assertIndexSearch(
        personRequest(dateFilter("Person.Natural.Date_Of_Birth", "equals", "02-08-1972", "02-08-1992")),
        "(v.\"Person.Natural.Date_Of_Birth\":[ 02-08-1972 TO 02-08-1972 ])");
  }

  public void testSingleDateFilterGreaterThan()
  {
    assertIndexSearch(
        personRequest(dateFilter("Person.Natural.Date_Of_Birth", "greaterThan", "02-08-1972", "02-08-1992")),
        "(v.\"Person.Natural.Date_Of_Birth\":{ 02-08-1972 TO * })");
  }

  public void testSingleDateFilterLessThan()
  {
    assertIndexSearch(
        personRequest(dateFilter("Person.Natural.Date_Of_Birth", "lessThan", "02-08-1972", "02-08-1992")),
        "(v.\"Person.Natural.Date_Of_Birth\":{ * TO 02-08-1972 })");
  }

  public void testComplexFilters()
  {
    assertIndexSearch(
        personRequest(
            compositeFilter("Person.Natural.Full_Name", "OR", null,
                textCondition("notContains", "Renata"),
                textCondition("notEqual", "Leonardo")),
            textFilter("Person.Natural.Last_Name", "equals", "Martins")),
        "((v.\"Person.Natural.Full_Name\":*!*Renata*) OR (v.\"Person.Natural.Full_Name\":*!Leonardo)) AND (v.\"Person.Natural.Last_Name\":Martins)");
  }

  public void testComplexFiltersStartsEndsWith()
  {
    assertIndexSearch(
        personRequest(
            compositeFilter("Person.Natural.Full_Name", "OR", null,
                textCondition("startsWith", "Renata"),
                textCondition("notEqual", "Leonardo")),
            textFilter("Person.Natural.Last_Name", "endsWith", "Martins")),
        "((v.\"Person.Natural.Full_Name\":Renata*) OR (v.\"Person.Natural.Full_Name\":*!Leonardo)) AND (v.\"Person.Natural.Last_Name\":*Martins)");
  }

  public void testComplexDateFilters()
  {
    assertIndexSearch(
        personRequest(
            compositeFilter("Person.Natural.Date_Of_Birth", "OR", "date",
                dateCondition("inRange", "01-02-1999", "01-02-2009"),
                dateCondition("notEqual", "11-02-1999", null)),
            textFilter("Person.Natural.Last_Name", "endsWith", "Martins")),
        "((v.\"Person.Natural.Date_Of_Birth\":[ 01-02-1999 TO 01-02-2009 ]) OR (( (v.\"Person.Natural.Date_Of_Birth\":{ * TO 11-02-1999 } ) OR (v.\"Person.Natural.Date_Of_Birth\":{ 11-02-1999 TO * } ) ))) AND (v.\"Person.Natural.Last_Name\":*Martins)");
  }

  public void testComplexDateFiltersReversedCond()
  {
    assertIndexSearch(
        personRequest(
            compositeFilter("Person.Natural.Date_Of_Birth", "OR", "date",
                dateCondition("notEqual", "11-02-1999", null),
                dateCondition("inRange", "01-02-1999", "01-02-2009")),
            textFilter("Person.Natural.Last_Name", "endsWith", "Martins")),
        "((( (v.\"Person.Natural.Date_Of_Birth\":{ * TO 11-02-1999 } ) OR (v.\"Person.Natural.Date_Of_Birth\":{ 11-02-1999 TO * } ) )) OR (v.\"Person.Natural.Date_Of_Birth\":[ 01-02-1999 TO 01-02-2009 ])) AND (v.\"Person.Natural.Last_Name\":*Martins)");
  }

  public void testComplexDateFiltersFromJson()
  {
    // The composite date filter is deserialized from JSON to exercise the
    // Gson path end-to-end.
    Gson gson = new Gson();
    PVGridFilters parsed = gson.fromJson("{\n"
        + " \"colId\": \"Person.Natural.Date_Of_Birth\",\n"
        + " \"filterType\": \"date\",\n"
        + " \"operator\": \"AND\",\n"
        + " \"condition1\": {\n"
        + " \"dateTo\": null,\n"
        + " \"dateFrom\": \"1964-04-19\",\n"
        + " \"type\": \"notEqual\",\n"
        + " \"filterType\": \"date\"\n"
        + " },\n"
        + " \"condition2\": {\n"
        + " \"dateTo\": \"2005-09-02\",\n"
        + " \"dateFrom\": \"1975-09-02\",\n"
        + " \"type\": \"inRange\",\n"
        + " \"filterType\": \"date\"\n"
        + " }\n"
        + " }\n"
        + "", PVGridFilters.class);
    assertIndexSearch(
        personRequest(parsed, textFilter("Person.Natural.Last_Name", "endsWith", "Martins")),
        "((( (v.\"Person.Natural.Date_Of_Birth\":{ * TO 1964-04-19 } ) OR (v.\"Person.Natural.Date_Of_Birth\":{ 1964-04-19 TO * } ) )) AND (v.\"Person.Natural.Date_Of_Birth\":[ 1975-09-02 TO 2005-09-02 ])) AND (v.\"Person.Natural.Last_Name\":*Martins)");
  }

  public void testEmptyFilters()
  {
    // An empty filter array must translate to the empty search string.
    assertIndexSearch(personRequest(), "");
  }
}
| 35.62069
| 282
| 0.583045
|
3bc93afb0d7c0f7bec568a24b4e663abc3412d74
| 105
|
package com.zenchat.server.message;
/**
 * Handles a single request message and produces a response.
 *
 * <p>Marked {@code @FunctionalInterface} so implementations can be supplied
 * as lambdas or method references; the compiler now also guarantees the
 * single-abstract-method shape is preserved.</p>
 *
 * @param <R> the response type produced by the handler
 * @param <T> the request type this handler accepts
 */
@FunctionalInterface
public interface MessageHandler<R, T> {
    /**
     * Processes the given request.
     *
     * @param request the incoming request to handle
     * @return the handler's response
     */
    R handle(T request);
}
| 15
| 39
| 0.72381
|
b6ec143330e3a620d6a8ebf18c75d6eb40c9c9c3
| 4,701
|
package com.example.calculadorasimples;
import androidx.appcompat.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
/**
 * Simple four-operation calculator. The public handler methods are wired to
 * the buttons through {@code android:onClick} in the layout XML, so their
 * names and signatures must not change.
 */
public class MainActivity extends AppCompatActivity {

    // Operation buttons and the clear button.
    private Button btnSoma, btnDiv, btnMulti, btnSub, btnLimpa;
    // Input fields for the two operands.
    private EditText n1, n2;
    // Displays the result of the last operation.
    private TextView resultado;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        // Bind the widgets declared in the layout to their fields.
        btnSoma = findViewById(R.id.btnSomar);
        btnDiv = findViewById(R.id.btnDividir);
        btnMulti = findViewById(R.id.btnMultiplicar);
        btnSub = findViewById(R.id.btnSubtrair);
        btnLimpa = findViewById(R.id.btnLimpar);
        n1 = findViewById(R.id.editTextN1);
        n2 = findViewById(R.id.editTextN2);
        resultado = findViewById(R.id.textViewResultado);
    }

    /** onClick handler for the addition button (referenced from layout XML). */
    public void Somar(View view) {
        calculate('+');
    }

    /** onClick handler for the subtraction button (referenced from layout XML). */
    public void Subtrair(View view) {
        calculate('-');
    }

    /** onClick handler for the multiplication button (referenced from layout XML). */
    public void Multiplicar(View view) {
        calculate('*');
    }

    /** onClick handler for the division button (referenced from layout XML). */
    public void Dividir(View view) {
        calculate('/');
    }

    /** Clears both operand fields and the result display. */
    public void Limpar(View view) {
        n1.setText("");
        n2.setText("");
        resultado.setText("");
    }

    /**
     * Shared implementation for the four arithmetic handlers: reads both
     * operand fields, applies {@code op} to them, and shows the result.
     * Division by zero yields Infinity/NaN, matching double semantics.
     *
     * @param op one of '+', '-', '*', '/'
     */
    private void calculate(char op) {
        String a = n1.getText().toString().trim();
        String b = n2.getText().toString().trim();
        // Guard against blank input, which previously crashed the app with a
        // NumberFormatException inside Double.parseDouble.
        if (a.isEmpty() || b.isEmpty()) {
            resultado.setText("");
            return;
        }
        double x = Double.parseDouble(a);
        double y = Double.parseDouble(b);
        double result;
        switch (op) {
            case '+':
                result = x + y;
                break;
            case '-':
                result = x - y;
                break;
            case '*':
                result = x * y;
                break;
            default:
                result = x / y;
                break;
        }
        // Double.toString(double) matches the original Double#toString output.
        resultado.setText(Double.toString(result));
    }
}
| 39.838983
| 169
| 0.666029
|
136050d5cddbc0c22085542e562db7d1ba76523c
| 4,467
|
/**
* Copyright 2018 Sharad Singhal, All Rights Reserved
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* Created Mar 10, 2018 by sharad
*/
package net.aifusion.asn;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
* Unit tests for OctetString
* @author Sharad Singhal
*/
public class OctetStringValueTest {

    /** Octets shared by every test case. */
    static byte[] input = { -128, 3, 5, 7, 127 };

    @BeforeClass
    public static void setupBefore() throws Exception {
        System.out.print("OctetStringValueTest");
    }

    @AfterClass
    public static void tearDownAfter() throws Exception {
        System.out.print("\n");
    }

    @Before
    public void setUp() throws Exception {
        System.out.print("-");
    }

    @After
    public void tearDown() throws Exception {
        System.out.print(".");
    }

    /** Encoding prepends the OCTET STRING tag (4) and length (5) to the payload. */
    @Test
    public final void testGetEncoded() {
        OctetStringValue octets = new OctetStringValue(input);
        assertArrayEquals(new byte[] { 4, 5, -128, 3, 5, 7, 127 }, octets.getEncodedValue());
    }

    /** Decoding a serialized buffer must round-trip both the encoding and the payload. */
    @Test
    public final void testCreate() {
        byte[] encoded = new byte[] { 4, 5, -128, 3, 5, 7, 127 };
        OctetStringValue decoded = OctetStringValue.create(encoded, encoded.length, 0);
        assertArrayEquals(encoded, decoded.getEncodedValue());
        assertArrayEquals(input, decoded.getValue());
    }

    /** The ASN.1 textual rendering lists the octets between braces. */
    @Test
    public final void testToAsnValue() {
        OctetStringValue octets = new OctetStringValue(input);
        assertEquals("[OCTET_STRING] ::= { -128 3 5 7 127 }\n", octets.toAsnString(""));
    }

    /** The constructor must accept an arbitrary byte payload. */
    @Test
    public final void testOctetString() {
        assertNotNull(new OctetStringValue(input));
    }

    /** getValue() must return the wrapped octets unchanged. */
    @Test
    public final void testGetValue() {
        assertArrayEquals(input, new OctetStringValue(input).getValue());
    }

    /** Equality is value-based on the wrapped octets. */
    @Test
    public final void testEqualsObject() {
        OctetStringValue first = new OctetStringValue(input);
        OctetStringValue sameBytes = new OctetStringValue(input);
        OctetStringValue otherBytes = new OctetStringValue(new byte[] { 54 });
        assertEquals(first, sameBytes);
        assertNotEquals(first, otherBytes);
    }

    /** toString() renders only the payload, without tag or length. */
    @Test
    public final void testToString() {
        assertEquals("{ -128 3 5 7 127 }", new OctetStringValue(input).toString());
    }
}
| 33.586466
| 88
| 0.711663
|
90ff55cc1931e57ec181d016c7f1ff70b551bb10
| 3,640
|
/*
* Copyright 2002-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.http.client;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpRequest;
import org.springframework.util.Assert;
import org.springframework.util.StreamUtils;
import org.springframework.util.concurrent.ListenableFuture;
import java.io.IOException;
import java.net.URI;
import java.util.Iterator;
import java.util.List;
/**
* An {@link AsyncClientHttpRequest} wrapper that enriches it proceeds the actual
* request execution with calling the registered interceptors.
*
* @author Jakub Narloch
* @author Rossen Stoyanchev
* @see InterceptingAsyncClientHttpRequestFactory
* @deprecated as of Spring 5.0, with no direct replacement
*/
@Deprecated
class InterceptingAsyncClientHttpRequest extends AbstractBufferingAsyncClientHttpRequest {

    // All collaborators are assigned exactly once in the constructor,
    // so the fields are declared final (immutability by default).
    private final AsyncClientHttpRequestFactory requestFactory;

    private final List<AsyncClientHttpRequestInterceptor> interceptors;

    private final URI uri;

    private final HttpMethod httpMethod;

    /**
     * Create new instance of {@link InterceptingAsyncClientHttpRequest}.
     *
     * @param requestFactory the async request factory
     * @param interceptors the list of interceptors
     * @param uri the request URI
     * @param httpMethod the HTTP method
     */
    public InterceptingAsyncClientHttpRequest(AsyncClientHttpRequestFactory requestFactory,
            List<AsyncClientHttpRequestInterceptor> interceptors, URI uri, HttpMethod httpMethod) {
        this.requestFactory = requestFactory;
        this.interceptors = interceptors;
        this.uri = uri;
        this.httpMethod = httpMethod;
    }

    @Override
    protected ListenableFuture<ClientHttpResponse> executeInternal(HttpHeaders headers, byte[] body)
            throws IOException {
        return new AsyncRequestExecution().executeAsync(this, body);
    }

    @Override
    public HttpMethod getMethod() {
        return this.httpMethod;
    }

    @Override
    public String getMethodValue() {
        return this.httpMethod.name();
    }

    @Override
    public URI getURI() {
        return this.uri;
    }

    /**
     * Walks the interceptor chain; once it is exhausted, builds and executes
     * the actual request through the configured factory.
     */
    private class AsyncRequestExecution implements AsyncClientHttpRequestExecution {

        // One iterator per execution: each request walks the chain once.
        private final Iterator<AsyncClientHttpRequestInterceptor> iterator = interceptors.iterator();

        @Override
        public ListenableFuture<ClientHttpResponse> executeAsync(HttpRequest request, byte[] body)
                throws IOException {
            if (this.iterator.hasNext()) {
                AsyncClientHttpRequestInterceptor interceptor = this.iterator.next();
                return interceptor.intercept(request, body, this);
            } else {
                URI uri = request.getURI();
                HttpMethod method = request.getMethod();
                HttpHeaders headers = request.getHeaders();
                Assert.state(method != null, "No standard HTTP method");
                AsyncClientHttpRequest delegate = requestFactory.createAsyncRequest(uri, method);
                delegate.getHeaders().putAll(headers);
                if (body.length > 0) {
                    StreamUtils.copy(body, delegate.getBody());
                }
                return delegate.executeAsync();
            }
        }
    }
}
| 28.888889
| 100
| 0.76044
|
29fb59f04b7394fb3933d8c8f9171bcd164aaf54
| 7,786
|
/**
*
*/
package gov.noaa.pmel.dashboard.shared;
import com.google.gwt.user.client.rpc.IsSerializable;
import java.io.Serializable;
/**
* Represents an uploaded metadata file of unknown contents.
*
* @author Karl Smith
*/
public class DashboardMetadata implements Serializable, IsSerializable {

    private static final long serialVersionUID = 8947621857406378016L;

    /**
     * Separator between the filename and the upload timestamp in additional document titles.
     */
    private static final String TITLE_SEPARATOR = " ; ";

    // Fields stay non-final and a no-arg constructor is provided so the class
    // remains serializable both by Java and by GWT RPC.
    protected boolean selected;
    protected String datasetId;
    protected String filename;
    protected String uploadTimestamp;
    protected String owner;
    protected boolean conflicted;
    protected String version;

    /**
     * Creates an empty metadata document record.
     */
    public DashboardMetadata() {
        selected = false;
        datasetId = DashboardUtils.STRING_MISSING_VALUE;
        filename = DashboardUtils.STRING_MISSING_VALUE;
        uploadTimestamp = DashboardUtils.STRING_MISSING_VALUE;
        owner = DashboardUtils.STRING_MISSING_VALUE;
        conflicted = false;
        version = DashboardUtils.STRING_MISSING_VALUE;
    }

    /**
     * Returns the additional documents title for this metadata.
     * Normally this title is the filename, followed by a space, a semicolon, another space,
     * and the upload timestamp. If the filename is empty, this title is empty.
     * If the upload timestamp is empty, the title is just the filename.
     */
    public String getAddlDocsTitle() {
        if ( DashboardUtils.STRING_MISSING_VALUE.equals(filename) )
            return "";
        if ( DashboardUtils.STRING_MISSING_VALUE.equals(uploadTimestamp) )
            return filename;
        return filename + TITLE_SEPARATOR + uploadTimestamp;
    }

    /**
     * Returns the metadata filename and the upload timestamp given in the document title.
     *
     * @param docTitle
     *         document title to parse
     *
     * @return string array of length two with the filename as the first string
     *         and the timestamp as the second string. If the title is empty,
     *         both strings in the returned array will be empty. If the title does not
     *         have a timestamp, the timestamp in the returned array will be empty.
     */
    public static String[] splitAddlDocsTitle(String docTitle) {
        String[] pieces = docTitle.split(TITLE_SEPARATOR, 2);
        if ( pieces.length == 1 )
            pieces = new String[] { docTitle, "" };
        return pieces;
    }

    /**
     * @return if the metadata document is selected
     */
    public boolean isSelected() {
        return selected;
    }

    /**
     * @param selected
     *         set whether this metadata document is selected
     */
    public void setSelected(boolean selected) {
        this.selected = selected;
    }

    /**
     * @return the dataset ID; never null, but may be {@link DashboardUtils#STRING_MISSING_VALUE}
     */
    public String getDatasetId() {
        return datasetId;
    }

    /**
     * @param datasetId
     *         the dataset ID to set; if null, {@link DashboardUtils#STRING_MISSING_VALUE} is assigned
     */
    public void setDatasetId(String datasetId) {
        this.datasetId = (datasetId != null) ? datasetId : DashboardUtils.STRING_MISSING_VALUE;
    }

    /**
     * @return the filename; never null, but may be {@link DashboardUtils#STRING_MISSING_VALUE}
     */
    public String getFilename() {
        return filename;
    }

    /**
     * @param filename
     *         the filename to set; if null, {@link DashboardUtils#STRING_MISSING_VALUE} is assigned
     */
    public void setFilename(String filename) {
        this.filename = (filename != null) ? filename : DashboardUtils.STRING_MISSING_VALUE;
    }

    /**
     * @return the upload timestamp; never null, but may be {@link DashboardUtils#STRING_MISSING_VALUE}
     */
    public String getUploadTimestamp() {
        return uploadTimestamp;
    }

    /**
     * @param uploadTimestamp
     *         the upload timestamp to set; if null, {@link DashboardUtils#STRING_MISSING_VALUE} is assigned
     */
    public void setUploadTimestamp(String uploadTimestamp) {
        this.uploadTimestamp = (uploadTimestamp != null) ? uploadTimestamp : DashboardUtils.STRING_MISSING_VALUE;
    }

    /**
     * @return the owner; never null, but may be {@link DashboardUtils#STRING_MISSING_VALUE}
     */
    public String getOwner() {
        return owner;
    }

    /**
     * @param owner
     *         the owner to set; if null, {@link DashboardUtils#STRING_MISSING_VALUE} is assigned
     */
    public void setOwner(String owner) {
        this.owner = (owner != null) ? owner : DashboardUtils.STRING_MISSING_VALUE;
    }

    /**
     * @return true if conflicts have been detected in the metadata; false otherwise.
     */
    public boolean isConflicted() {
        return conflicted;
    }

    /**
     * @param conflicted
     *         any conflicts detected in the metadata object?
     */
    public void setConflicted(boolean conflicted) {
        this.conflicted = conflicted;
    }

    /**
     * @return the version; never null, but may be {@link DashboardUtils#STRING_MISSING_VALUE}
     */
    public String getVersion() {
        return version;
    }

    /**
     * @param version
     *         the version to set; if null, {@link DashboardUtils#STRING_MISSING_VALUE} is assigned
     */
    public void setVersion(String version) {
        this.version = (version != null) ? version : DashboardUtils.STRING_MISSING_VALUE;
    }

    @Override
    public int hashCode() {
        final int prime = 37;
        int result = Boolean.valueOf(selected).hashCode();
        result = result * prime + datasetId.hashCode();
        result = result * prime + filename.hashCode();
        result = result * prime + uploadTimestamp.hashCode();
        result = result * prime + owner.hashCode();
        result = result * prime + Boolean.valueOf(conflicted).hashCode();
        result = result * prime + version.hashCode();
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if ( this == obj )
            return true;
        if ( obj == null )
            return false;
        if ( !(obj instanceof DashboardMetadata) )
            return false;
        DashboardMetadata other = (DashboardMetadata) obj;
        if ( selected != other.selected )
            return false;
        if ( !datasetId.equals(other.datasetId) )
            return false;
        if ( !filename.equals(other.filename) )
            return false;
        if ( !uploadTimestamp.equals(other.uploadTimestamp) )
            return false;
        if ( !owner.equals(other.owner) )
            return false;
        if ( conflicted != other.conflicted )
            return false;
        if ( !version.equals(other.version) )
            return false;
        return true;
    }

    @Override
    public String toString() {
        return "DashboardMetadata" +
                "[ selected=" + Boolean.toString(selected) +
                ",\n datasetId=" + datasetId +
                ",\n filename=" + filename +
                ",\n uploadTimestamp=" + uploadTimestamp +
                ",\n owner=" + owner +
                ",\n conflicted=" + Boolean.toString(conflicted) +
                ",\n version=" + version +
                " ]";
    }
}
| 30.896825
| 120
| 0.6084
|
6999422c46305e0154452a7dd710b1a23d81b8f6
| 35,633
|
/*
* Copyright 2002-2019 Barcelona Supercomputing Center (www.bsc.es)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package es.bsc.compss.components.impl;
import es.bsc.compss.api.TaskMonitor;
import es.bsc.compss.components.monitor.impl.GraphGenerator;
import es.bsc.compss.log.Loggers;
import es.bsc.compss.types.annotations.parameter.DataType;
import es.bsc.compss.types.annotations.parameter.OnFailure;
import es.bsc.compss.types.TaskDescription;
import es.bsc.compss.types.Task;
import es.bsc.compss.types.Task.TaskState;
import es.bsc.compss.types.data.DataInfo;
import es.bsc.compss.types.data.DataInstanceId;
import es.bsc.compss.types.data.accessid.RAccessId;
import es.bsc.compss.types.data.accessid.RWAccessId;
import es.bsc.compss.types.data.accessid.WAccessId;
import es.bsc.compss.types.data.AccessParams.*;
import es.bsc.compss.types.data.DataAccessId;
import es.bsc.compss.types.data.DataAccessId.*;
import es.bsc.compss.types.data.operation.ResultListener;
import es.bsc.compss.types.implementations.Implementation.TaskType;
import es.bsc.compss.types.parameter.BindingObjectParameter;
import es.bsc.compss.types.parameter.CollectionParameter;
import es.bsc.compss.types.parameter.DependencyParameter;
import es.bsc.compss.types.parameter.ExternalPSCOParameter;
import es.bsc.compss.types.parameter.FileParameter;
import es.bsc.compss.types.parameter.ObjectParameter;
import es.bsc.compss.types.parameter.Parameter;
import es.bsc.compss.types.request.ap.EndOfAppRequest;
import es.bsc.compss.types.request.ap.WaitForConcurrentRequest;
import es.bsc.compss.types.request.ap.BarrierRequest;
import es.bsc.compss.types.request.ap.WaitForTaskRequest;
import es.bsc.compss.util.ErrorManager;
import java.util.Hashtable;
import java.util.LinkedList;
import java.util.List;
import java.util.HashMap;
import java.util.HashSet;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.Map.Entry;
import java.util.concurrent.Semaphore;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import storage.StubItf;
/**
* Class to analyze the data dependencies between tasks
*/
public class TaskAnalyser {
// Co-worker components, injected via setCoWorkers / setGM
private DataInfoProvider DIP;
private GraphGenerator GM;
// <Data id, Last writer task> table
private TreeMap<Integer, Task> writers;
// Number of currently-running tasks per method/core id
private HashMap<Integer, Integer> currentTaskCount;
// Map: app id -> cumulative task count (never decremented)
private HashMap<Long, Integer> appIdToTotalTaskCount;
// Map: app id -> pending task count (decremented as tasks end)
private HashMap<Long, Integer> appIdToTaskCount;
// Map: app id -> semaphore to notify end of app
private HashMap<Long, Semaphore> appIdToSemaphore;
// List of appIds stopped on a barrier synchronization point
private HashSet<Long> appIdBarrierFlags;
// Map: app id -> set of written data ids (for result files)
private HashMap<Long, TreeSet<Integer>> appIdToWrittenFiles;
// Map: app id -> set of written data ids (for result SCOs)
private HashMap<Long, TreeSet<Integer>> appIdToSCOWrittenIds;
// Tasks being waited on: task -> list of semaphores where to notify end of task
private Hashtable<Task, List<Semaphore>> waitedTasks;
// Data accessed concurrently: data id -> tasks accessing it in CONCURRENT mode
// (a key may be explicitly mapped to null once its accesses are consumed)
private TreeMap<Integer, List<Task>> concurrentAccessMap;
// Logger
private static final Logger LOGGER = LogManager.getLogger(Loggers.TA_COMP);
private static final boolean DEBUG = LOGGER.isDebugEnabled();
private static final String TASK_FAILED = "Task failed: ";
private static final String TASK_CANCELED = "Task canceled: ";
// Graph drawing
private static final boolean IS_DRAW_GRAPH = GraphGenerator.isEnabled();
// State used when emitting synchronization nodes in the dependency graph
private int synchronizationId;
private boolean taskDetectedAfterSync;
/**
 * Creates a new Task Analyser instance with empty bookkeeping structures.
 */
public TaskAnalyser() {
    // All fields consistently qualified with this. (the last two were not)
    this.currentTaskCount = new HashMap<>();
    this.writers = new TreeMap<>();
    this.appIdToTaskCount = new HashMap<>();
    this.appIdToTotalTaskCount = new HashMap<>();
    this.appIdToSemaphore = new HashMap<>();
    this.appIdBarrierFlags = new HashSet<>();
    this.appIdToWrittenFiles = new HashMap<>();
    this.appIdToSCOWrittenIds = new HashMap<>();
    this.waitedTasks = new Hashtable<>();
    this.concurrentAccessMap = new TreeMap<>();
    this.synchronizationId = 0;
    this.taskDetectedAfterSync = false;
    LOGGER.info("Initialization finished");
}
/**
 * Registers the DataInfoProvider this component collaborates with.
 *
 * @param dataInfoProvider co-worker in charge of data registration
 */
public void setCoWorkers(DataInfoProvider dataInfoProvider) {
    this.DIP = dataInfoProvider;
}
/**
 * Registers the graph generator used to draw the dependency graph.
 *
 * @param graphGenerator graph generator co-worker
 */
public void setGM(GraphGenerator graphGenerator) {
    this.GM = graphGenerator;
}
/**
 * Registers in the DataInfoProvider the data access performed by parameter
 * {@code p} of {@code currentTask} and adds the resulting dependencies to the
 * task. Collection parameters are processed recursively, element accesses
 * being registered before the collection itself.
 *
 * @param currentTask task whose parameter is being processed
 * @param isConstraining whether this parameter enforces the task's scheduling
 * @param p the parameter to register
 * @return the registered access id, or {@code null} for basic (non-data) types
 */
private DataAccessId registerParameterAccessAndAddDependencies(Task currentTask, boolean isConstraining,
    Parameter p) {
    // Conversion: direction -> access mode
    AccessMode am = AccessMode.R;
    switch (p.getDirection()) {
        case IN:
            am = AccessMode.R;
            break;
        case OUT:
            am = AccessMode.W;
            break;
        case INOUT:
            am = AccessMode.RW;
            break;
        case CONCURRENT:
            am = AccessMode.C;
            break;
    }
    // Inform the Data Manager about the new accesses
    DataAccessId daId = null;
    switch (p.getType()) {
        case FILE_T:
            FileParameter fp = (FileParameter) p;
            daId = this.DIP.registerFileAccess(am, fp.getLocation());
            break;
        case PSCO_T:
            ObjectParameter pscop = (ObjectParameter) p;
            // Check if its PSCO class and persisted to infer its type
            pscop.setType(DataType.PSCO_T);
            daId = this.DIP.registerObjectAccess(am, pscop.getValue(), pscop.getCode());
            break;
        case EXTERNAL_PSCO_T:
            ExternalPSCOParameter externalPSCOparam = (ExternalPSCOParameter) p;
            // Check if its PSCO class and persisted to infer its type
            externalPSCOparam.setType(DataType.EXTERNAL_PSCO_T);
            daId = DIP.registerExternalPSCOAccess(am, externalPSCOparam.getId(), externalPSCOparam.getCode());
            break;
        case BINDING_OBJECT_T:
            BindingObjectParameter bindingObjectparam = (BindingObjectParameter) p;
            // Check if its Binding OBJ and register its access
            bindingObjectparam.setType(DataType.BINDING_OBJECT_T);
            daId = DIP.registerBindingObjectAccess(am, bindingObjectparam.getBindingObject(),
                bindingObjectparam.getCode());
            break;
        case OBJECT_T:
            ObjectParameter op = (ObjectParameter) p;
            // Check if its PSCO class and persisted to infer its type
            if (op.getValue() instanceof StubItf && ((StubItf) op.getValue()).getID() != null) {
                op.setType(DataType.PSCO_T);
            }
            daId = this.DIP.registerObjectAccess(am, op.getValue(), op.getCode());
            break;
        case COLLECTION_T:
            CollectionParameter cp = (CollectionParameter) p;
            // Register every element access before the collection itself
            for (Parameter content : cp.getParameters()) {
                registerParameterAccessAndAddDependencies(currentTask, isConstraining, content);
            }
            daId = DIP.registerCollectionAccess(am, cp);
            break;
        default:
            // This is a basic type, there are no accesses to register
            return null;
    }
    DependencyParameter dp = (DependencyParameter) p;
    dp.setDataAccessId(daId);
    addDependencies(am, currentTask, isConstraining, dp);
    return daId;
}
/**
 * Adds to {@code currentTask} the dependencies derived from access {@code am}
 * on parameter {@code dp}, and registers any produced output values for
 * future dependencies. Reads on data previously accessed in CONCURRENT mode
 * are handled through the concurrent-access bookkeeping instead of the
 * regular last-writer check.
 *
 * @param am access mode of the parameter
 * @param currentTask task being analysed
 * @param isConstraining whether this parameter enforces scheduling
 * @param dp dependency parameter carrying the registered access id
 */
private void addDependencies(AccessMode am, Task currentTask, boolean isConstraining, DependencyParameter dp) {
    // Add dependencies to the graph and register output values for future dependencies
    DataAccessId daId = dp.getDataAccessId();
    switch (am) {
        case R:
            if (!dataWasAccessedConcurrent(daId.getDataId())) {
                checkDependencyForRead(currentTask, dp);
            } else {
                checkDependencyForConcurrent(currentTask, dp);
            }
            if (isConstraining) {
                // The task must be scheduled where the depended-on data resides
                RAccessId raId = (RAccessId) dp.getDataAccessId();
                DataInstanceId dependingDataId = raId.getReadDataInstance();
                if (dependingDataId != null) {
                    if (dependingDataId.getVersionId() > 1) {
                        currentTask.setEnforcingTask(this.writers.get(dependingDataId.getDataId()));
                    }
                }
            }
            break;
        case RW:
            if (!dataWasAccessedConcurrent(daId.getDataId())) {
                checkDependencyForRead(currentTask, dp);
            } else {
                // A RW access closes the concurrent-access window on this data
                checkDependencyForConcurrent(currentTask, dp);
                removeFromConcurrentAccess(dp.getDataAccessId().getDataId());
            }
            if (isConstraining) {
                RWAccessId raId = (RWAccessId) dp.getDataAccessId();
                DataInstanceId dependingDataId = raId.getReadDataInstance();
                if (dependingDataId != null) {
                    if (dependingDataId.getVersionId() > 1) {
                        currentTask.setEnforcingTask(this.writers.get(dependingDataId.getDataId()));
                    }
                }
            }
            registerOutputValues(currentTask, dp);
            break;
        case W:
            // A plain write also closes any concurrent-access window
            if (dataWasAccessedConcurrent(daId.getDataId())) {
                removeFromConcurrentAccess(dp.getDataAccessId().getDataId());
            }
            registerOutputValues(currentTask, dp);
            break;
        case C:
            // Track this task as a concurrent accessor of the data
            checkDependencyForRead(currentTask, dp);
            List<Task> tasks = this.concurrentAccessMap.get(daId.getDataId());
            if (tasks == null) {
                tasks = new LinkedList<Task>();
                this.concurrentAccessMap.put(daId.getDataId(), tasks);
            }
            tasks.add(currentTask);
            break;
    }
}
/**
 * Registers a new task: updates the per-method and per-application counters,
 * draws it in the dependency graph (when enabled), and registers the data
 * accesses of all of its parameters.
 *
 * @param currentTask task to process
 */
public void processTask(Task currentTask) {
    TaskDescription params = currentTask.getTaskDescription();
    LOGGER.info("New " + (params.getType() == TaskType.METHOD ? "method" : "service") + " task(" + params.getName()
        + "), ID = " + currentTask.getId());
    if (IS_DRAW_GRAPH) {
        addNewTask(currentTask);
    }
    // Update task count per method and per application
    incrementCount(this.currentTaskCount, params.getId());
    Long appId = currentTask.getAppId();
    incrementCount(this.appIdToTaskCount, appId);
    incrementCount(this.appIdToTotalTaskCount, appId);
    // Check scheduling enforcing data
    int constrainingParam = -1;
    if (params.getType() == TaskType.SERVICE && params.hasTargetObject()) {
        // The target object of a service task constrains its scheduling
        constrainingParam = params.getParameters().length - 1 - params.getNumReturns();
    }
    Parameter[] parameters = params.getParameters();
    for (int paramIdx = 0; paramIdx < parameters.length; paramIdx++) {
        registerParameterAccessAndAddDependencies(currentTask, paramIdx == constrainingParam, parameters[paramIdx]);
    }
}

/**
 * Increments by one the counter stored under {@code key}, treating a missing
 * entry as zero. Factors out the get/null-check/put pattern previously
 * repeated for each of the three counter maps.
 */
private static <K> void incrementCount(HashMap<K, Integer> counters, K key) {
    Integer count = counters.get(key);
    counters.put(key, count == null ? 1 : count + 1);
}
/**
 * Marks the data access of parameter {@code p} by finished task {@code t} as
 * performed — or as canceled, when the task failed/was canceled under the
 * CANCEL_SUCCESSORS policy. Collection parameters are processed recursively
 * for each sub-parameter, and then the collection object itself is updated.
 *
 * @param t the finished task
 * @param p the parameter whose access must be updated
 */
private void updateParameterAccess(Task t, Parameter p) {
    DataType type = p.getType();
    if( type == DataType.COLLECTION_T ) {
        for(Parameter subParam: ((CollectionParameter)p).getParameters()) {
            updateParameterAccess(t, subParam);
        }
    }
    // Note: COLLECTION_T deliberately also matches the condition below, so
    // the collection's own access is updated after its elements'
    if (type == DataType.FILE_T || type == DataType.OBJECT_T || type == DataType.PSCO_T
        || type == DataType.EXTERNAL_PSCO_T || type == DataType.BINDING_OBJECT_T
        || type == DataType.COLLECTION_T) {
        DependencyParameter dPar = (DependencyParameter) p;
        DataAccessId dAccId = dPar.getDataAccessId();
        if (DEBUG) {
            LOGGER.debug("Treating that data " + dAccId + " has been accessed at " + dPar.getDataTarget());
        }
        if (t.getOnFailure() == OnFailure.CANCEL_SUCCESSORS
            && (t.getStatus() == TaskState.FAILED || t.getStatus() == TaskState.CANCELED)) {
            this.DIP.dataAccessHasBeenCanceled(dAccId);
        } else {
            this.DIP.dataHasBeenAccessed(dAccId);
        }
    }
}
/**
 * Registers the end of execution of {@code task}: notifies its monitor
 * according to the final state, updates the per-application pending-task
 * count (releasing any barrier/no-more-tasks semaphore when it reaches zero),
 * wakes threads blocked waiting on this task, marks all parameter accesses as
 * performed, optionally orders result-file transfers, and releases the tasks
 * that data-depend on it.
 *
 * @param task the finished task
 */
public void endTask(Task task) {
    int taskId = task.getId();
    boolean isFree = task.isFree();
    TaskState taskState = task.getStatus();
    OnFailure onFailure = task.getOnFailure();
    LOGGER.info("Notification received for task " + taskId + " with end status " + taskState);
    // Check status: if other executions of the task are still running,
    // postpone the end-of-task treatment until the task is free
    if (!isFree) {
        LOGGER.debug("Task " + taskId + " is not registered as free. Waiting for other executions to end");
        return;
    }
    // Notify the task monitor; RETRY/FAIL policies abort on failure
    TaskMonitor registeredMonitor = task.getTaskMonitor();
    switch (taskState) {
        case FAILED:
            registeredMonitor.onFailure();
            if (onFailure == OnFailure.RETRY || onFailure == OnFailure.FAIL) {
                ErrorManager.error(TASK_FAILED + task);
                return;
            }
            if (onFailure == OnFailure.IGNORE || onFailure == OnFailure.CANCEL_SUCCESSORS) {
                // Show warning
                ErrorManager.warn(TASK_FAILED + task);
            }
            break;
        case CANCELED:
            registeredMonitor.onCancellation();
            // Show warning
            ErrorManager.warn(TASK_CANCELED + task);
            break;
        default:
            registeredMonitor.onCompletion();
    }
    /*
     * Treat end of task
     */
    LOGGER.debug("Ending task " + taskId);
    // Free dependencies: decrement the application's pending-task count
    Long appId = task.getAppId();
    Integer taskCount = this.appIdToTaskCount.get(appId) - 1;
    this.appIdToTaskCount.put(appId, taskCount);
    if (taskCount == 0) {
        // Remove the appId from the barrier flags (if existent, otherwise do nothing)
        this.appIdBarrierFlags.remove(appId);
        Semaphore sem = this.appIdToSemaphore.remove(appId);
        if (sem != null) {
            // Application was synchronized on a barrier flag or a no more tasks
            // Release the application semaphore
            this.appIdToTaskCount.remove(appId);
            sem.release();
        }
    }
    // Check if task is being waited: release every parked semaphore
    List<Semaphore> sems = this.waitedTasks.remove(task);
    if (sems != null) {
        for (Semaphore sem : sems) {
            sem.release();
        }
    }
    // Mark each parameter access as performed (or canceled)
    for (Parameter param : task.getTaskDescription().getParameters()) {
        updateParameterAccess(task, param);
    }
    // Check if the finished task was the last writer of a file, but only if task generation has finished
    // Task generation is finished if we are on noMoreTasks but we are not on a barrier
    if (this.appIdToSemaphore.get(appId) != null && !this.appIdBarrierFlags.contains(appId)) {
        checkResultFileTransfer(task);
    }
    // Release data dependent tasks
    task.releaseDataDependents();
}
/**
 * Checks if a finished task is the last writer of its file parameters and,
 * eventually, orders the necessary result-file transfers. Only INOUT and OUT
 * file parameters whose last registered writer is {@code t} are transferred.
 *
 * @param t the finished task
 */
private void checkResultFileTransfer(Task t) {
    LinkedList<DataInstanceId> fileIds = new LinkedList<>();
    for (Parameter p : t.getTaskDescription().getParameters()) {
        switch (p.getType()) {
            case FILE_T:
                FileParameter fp = (FileParameter) p;
                switch (fp.getDirection()) {
                    case IN:
                    case CONCURRENT:
                        // Not written by this task: nothing to transfer
                        break;
                    case INOUT:
                        DataInstanceId dId = ((RWAccessId) fp.getDataAccessId()).getWrittenDataInstance();
                        if (this.writers.get(dId.getDataId()) == t) {
                            fileIds.add(dId);
                        }
                        break;
                    case OUT:
                        // Note: reuses dId declared in the INOUT case above
                        // (switch cases share a scope in Java)
                        dId = ((WAccessId) fp.getDataAccessId()).getWrittenDataInstance();
                        if (this.writers.get(dId.getDataId()) == t) {
                            fileIds.add(dId);
                        }
                        break;
                }
                break;
            default:
                break;
        }
    }
    // Order the transfer of the result files
    final int numFT = fileIds.size();
    if (numFT > 0) {
        // List<ResultFile> resFiles = new ArrayList<ResultFile>(numFT);
        for (DataInstanceId fileId : fileIds) {
            try {
                int id = fileId.getDataId();
                this.DIP.blockDataAndGetResultFile(id, new ResultListener(new Semaphore(0)));
                this.DIP.unblockDataId(id);
            } catch (Exception e) {
                // Best-effort: a failed transfer order must not break end-of-task handling
                LOGGER.error("Exception ordering transfer when task ends", e);
            }
        }
    }
}
/**
 * Resolves a wait-for-task request: records the main code's access to the
 * requested data, then either releases the caller immediately (no pending
 * writer, or the writer already finished) or parks its semaphore until the
 * writing task ends.
 *
 * @param request wait request carrying the data id, access mode and semaphore
 */
public void findWaitedTask(WaitForTaskRequest request) {
    int dataId = request.getDataId();
    Semaphore sem = request.getSemaphore();
    Task lastWriter = this.writers.get(dataId);
    if (lastWriter != null) {
        treatDataAccess(lastWriter, request.getAccessMode(), dataId);
    }
    if (lastWriter != null && lastWriter.getStatus() != TaskState.FINISHED) {
        // The writer is still running: park the semaphore until endTask releases it
        List<Semaphore> pending = this.waitedTasks.get(lastWriter);
        if (pending == null) {
            pending = new LinkedList<>();
            this.waitedTasks.put(lastWriter, pending);
        }
        pending.add(sem);
    } else {
        sem.release();
    }
}
/**
 * Registers a main-code access to {@code dataId}: a read-write access clears the registered last writer,
 * and when graph drawing is enabled an edge from the producer task to the current synchronization node
 * is emitted for the datum's last version.
 *
 * @param lastWriter task registered as last writer of the datum
 * @param am access mode of the main-code access
 * @param dataId id of the accessed datum
 */
private void treatDataAccess(Task lastWriter, AccessMode am, int dataId) {
    // A RW access from the main code invalidates the registered writer
    if (am == AccessMode.RW) {
        this.writers.put(dataId, null);
    }
    if (IS_DRAW_GRAPH) {
        TreeSet<Integer> queried = new TreeSet<>();
        queried.add(dataId);
        DataInstanceId lastVersion = DIP.getLastVersions(queried).get(0);
        addEdgeFromTaskToMain(lastWriter, dataId, lastVersion.getVersionId());
    }
}
/**
 * Tells whether the given datum has registered concurrent accesses.
 *
 * @param daId data id to query
 * @return {@code true} if a concurrent access list is registered for {@code daId}
 */
public boolean dataWasAccessedConcurrent(int daId) {
    // Direct boolean expression replaces the former if/else returning true/false
    return this.concurrentAccessMap.get(daId) != null;
}
/**
 * Resolves a main-code access to data accessed concurrently: registers the access against every task in
 * the concurrent access list and parks the request's task semaphore on each unfinished one. The number
 * of tasks actually waited on is stored in the request before its semaphore is released.
 *
 * @param request wait request carrying the data id, access mode and semaphores
 */
public void findWaitedConcurrent(WaitForConcurrentRequest request) {
    int dataId = request.getDataId();
    AccessMode am = request.getAccessMode();
    List<Task> concurrentAccess = this.concurrentAccessMap.get(dataId);
    if (concurrentAccess == null) {
        // BUGFIX: the original null-checked the list but then iterated it unconditionally,
        // raising a NullPointerException when no concurrent access was registered.
        request.setNumWaitedTasks(0);
        request.getSemaphore().release();
        return;
    }
    // Reset the registered accesses for this datum (as in the original behavior)
    this.concurrentAccessMap.put(dataId, null);

    Semaphore semTasks = request.getTaskSemaphore();
    int n = 0;
    for (Task task : concurrentAccess) {
        treatDataAccess(task, am, dataId);
        if (task.getStatus() != TaskState.FINISHED) {
            n++;
            List<Semaphore> list = this.waitedTasks.get(task);
            if (list == null) {
                list = new LinkedList<>();
                this.waitedTasks.put(task, list);
            }
            list.add(semTasks);
        }
    }
    request.setNumWaitedTasks(n);
    request.getSemaphore().release();
}
/**
 * Handles an explicit application barrier: commits the current graph (if drawing is enabled) and either
 * releases the barrier semaphore right away — when the application has no in-flight tasks — or records
 * it so it is released once the last task finishes.
 *
 * @param request barrier request carrying the application id and semaphore
 */
public void barrier(BarrierRequest request) {
    Long appId = request.getAppId();
    Integer pending = this.appIdToTaskCount.get(appId);
    if (IS_DRAW_GRAPH) {
        addNewBarrier();
        // We can draw the graph on a barrier while we wait for tasks
        this.GM.commitGraph();
    }
    boolean allFinished = (pending == null || pending == 0);
    if (allFinished) {
        request.getSemaphore().release();
    } else {
        this.appIdBarrierFlags.add(appId);
        this.appIdToSemaphore.put(appId, request.getSemaphore());
    }
}
/**
 * Handles the end-of-application notification: commits the graph (if drawing is enabled) and either
 * releases the caller immediately — when no tasks remain for the application — or records the semaphore
 * so it is released when the last task finishes.
 *
 * @param request end-of-application request carrying the application id and semaphore
 */
public void noMoreTasks(EndOfAppRequest request) {
    Long appId = request.getAppId();
    Integer remaining = this.appIdToTaskCount.get(appId);
    if (IS_DRAW_GRAPH) {
        this.GM.commitGraph();
    }
    if (remaining != null && remaining != 0) {
        // Tasks still running: wake the caller later, on the last task end
        this.appIdToSemaphore.put(appId, request.getSemaphore());
    } else {
        this.appIdToTaskCount.remove(appId);
        request.getSemaphore().release();
    }
}
/**
 * Returns the set of file data ids written by the given application and drops the bookkeeping entry.
 *
 * @param appId application id
 * @return the written file ids, or {@code null} if none were recorded
 */
public TreeSet<Integer> getAndRemoveWrittenFiles(Long appId) {
    TreeSet<Integer> writtenIds = this.appIdToWrittenFiles.remove(appId);
    return writtenIds;
}
/**
 * Shuts down the analyser, removing the temporary graph file when graph drawing is enabled.
 */
public void shutdown() {
    if (!IS_DRAW_GRAPH) {
        return;
    }
    GraphGenerator.removeTemporaryGraph();
}
/**
 * Builds an XML snippet describing, per application, the total, in-progress and completed task counts.
 *
 * @return the {@code <TasksInfo>} XML fragment as a string
 */
public String getTaskStateRequest() {
    StringBuilder xml = new StringBuilder();
    xml.append("\t").append("<TasksInfo>").append("\n");
    for (Entry<Long, Integer> appEntry : this.appIdToTotalTaskCount.entrySet()) {
        Long appId = appEntry.getKey();
        int total = appEntry.getValue();
        // Applications with no pending entry have zero tasks in progress
        Integer inProgress = this.appIdToTaskCount.get(appId);
        if (inProgress == null) {
            inProgress = 0;
        }
        int done = total - inProgress;
        xml.append("\t\t").append("<Application id=\"").append(appId).append("\">").append("\n");
        xml.append("\t\t\t").append("<TotalCount>").append(total).append("</TotalCount>").append("\n");
        xml.append("\t\t\t").append("<InProgress>").append(inProgress).append("</InProgress>").append("\n");
        xml.append("\t\t\t").append("<Completed>").append(done).append("</Completed>").append("\n");
        xml.append("\t\t").append("</Application>").append("\n");
    }
    xml.append("\t").append("</TasksInfo>").append("\n");
    return xml.toString();
}
/**
 * Deletes the bookkeeping for the given datum: drops its last-writer entry and, only when no writer task
 * was registered, removes its id from every application's written-files set.
 *
 * @param dataInfo datum to delete
 */
public void deleteData(DataInfo dataInfo) {
    int dataId = dataInfo.getDataId();
    LOGGER.debug("Deleting data with id " + dataId);
    // If a task is still registered as writer, stop here: the datum is still being produced
    if (writers.remove(dataId) != null) {
        return;
    }
    LOGGER.debug("Removing " + dataInfo.getDataId() + " from written files");
    for (TreeSet<Integer> files : appIdToWrittenFiles.values()) {
        files.remove(dataInfo.getDataId());
    }
}
/**
 * Drops the list of tasks that accessed the given datum concurrently.
 *
 * @param dataId id of the datum whose concurrent access list is removed
 */
public void removeFromConcurrentAccess(int dataId) {
    // remove() returns null when no list was registered for the datum
    if (this.concurrentAccessMap.remove(dataId) == null) {
        LOGGER.debug("The concurrent list could not be removed");
    }
}
/*
**************************************************************************************************************
* DATA DEPENDENCY MANAGEMENT PRIVATE METHODS
**************************************************************************************************************/
/**
 * Adds a data dependency from {@code currentTask} to the last writer of the datum read through
 * {@code dp}, if such a writer exists and is a different task. When graph drawing is enabled the
 * corresponding edge is also emitted.
 *
 * @param currentTask task reading the datum
 * @param dp dependency parameter being read
 */
private void checkDependencyForRead(Task currentTask, DependencyParameter dp) {
    int dataId = dp.getDataAccessId().getDataId();
    Task lastWriter = this.writers.get(dataId);
    boolean hasProducer = (lastWriter != null && lastWriter != currentTask);
    if (hasProducer) {
        if (DEBUG) {
            LOGGER.debug(
                "Last writer for datum " + dp.getDataAccessId().getDataId() + " is task " + lastWriter.getId());
            LOGGER.debug(
                "Adding dependency between task " + lastWriter.getId() + " and task " + currentTask.getId());
        }
        currentTask.addDataDependency(lastWriter);
    } else if (DEBUG) {
        LOGGER.debug("There is no last writer for datum " + dp.getDataAccessId().getDataId());
    }
    // Handle when -g enabled
    if (IS_DRAW_GRAPH) {
        drawEdges(currentTask, dp, dataId, lastWriter);
    }
}
/**
 * Emits the graph edge for an access to {@code dataId} by {@code currentTask}: from the producer task
 * when one exists, otherwise from the current synchronization node. The edge label uses the version id
 * relevant to the access direction.
 *
 * @param currentTask task performing the access
 * @param dp dependency parameter accessed
 * @param dataId id of the accessed datum
 * @param lastWriter last registered writer of the datum (may be {@code null})
 */
private void drawEdges(Task currentTask, DependencyParameter dp, int dataId, Task lastWriter) {
    Direction dir = dp.getDataAccessId().getDirection();
    int dataVersion;
    switch (dir) {
        case C:
        case R:
            dataVersion = ((RAccessId) dp.getDataAccessId()).getRVersionId();
            break;
        case W:
            dataVersion = ((WAccessId) dp.getDataAccessId()).getWVersionId();
            break;
        default:
            // RW accesses (and any remaining direction) label the edge with the read version
            dataVersion = ((RWAccessId) dp.getDataAccessId()).getRVersionId();
            break;
    }
    boolean producedByOtherTask = (lastWriter != null && lastWriter != currentTask);
    if (producedByOtherTask) {
        addEdgeFromTaskToTask(lastWriter, currentTask, dataId, dataVersion);
    } else {
        addEdgeFromMainToTask(currentTask, dataId, dataVersion);
    }
}
/**
 * Adds data dependencies from {@code currentTask} to every task that accessed the datum of {@code dp}
 * concurrently (unless the current task is itself in that list). When graph drawing is enabled the
 * corresponding edges are also emitted.
 *
 * @param currentTask task accessing the datum
 * @param dp dependency parameter being accessed
 */
private void checkDependencyForConcurrent(Task currentTask, DependencyParameter dp) {
    int dataId = dp.getDataAccessId().getDataId();
    List<Task> tasks = this.concurrentAccessMap.get(dataId);
    // BUGFIX: the original tested the map field (never null) instead of the fetched list, so
    // tasks.contains(...) threw a NullPointerException when no concurrent access was registered.
    if (tasks != null && !tasks.contains(currentTask)) {
        if (DEBUG) {
            LOGGER.debug("There was a concurrent access for datum " + dataId);
            LOGGER.debug("Adding dependency between list and task " + currentTask.getId());
        }
        for (Task t : tasks) {
            // Add dependency
            currentTask.addDataDependency(t);
            if (IS_DRAW_GRAPH) {
                drawEdges(currentTask, dp, dataId, t);
            }
        }
    } else {
        if (DEBUG) {
            LOGGER.debug("There is no last writer for datum " + dataId);
        }
    }
}
/**
 * Registers {@code currentTask} as the new last writer of the datum referenced by {@code dp} and, when
 * the parameter is a file or a PSCO, records the written data id under the task's application.
 *
 * @param currentTask task producing the output value
 * @param dp dependency parameter written by the task
 */
private void registerOutputValues(Task currentTask, DependencyParameter dp) {
    final int taskId = currentTask.getId();
    final int dataId = dp.getDataAccessId().getDataId();
    final Long appId = currentTask.getAppId();

    // This task becomes the last writer of the datum
    this.writers.put(dataId, currentTask);

    // Track written files / PSCOs per application. Plain objects are not tracked:
    // their version is only fetched if the main code accesses them.
    switch (dp.getType()) {
        case FILE_T: {
            TreeSet<Integer> writtenFiles = this.appIdToWrittenFiles.get(appId);
            if (writtenFiles == null) {
                writtenFiles = new TreeSet<>();
                this.appIdToWrittenFiles.put(appId, writtenFiles);
            }
            writtenFiles.add(dataId);
            break;
        }
        case PSCO_T: {
            TreeSet<Integer> writtenPscos = this.appIdToSCOWrittenIds.get(appId);
            if (writtenPscos == null) {
                writtenPscos = new TreeSet<>();
                this.appIdToSCOWrittenIds.put(appId, writtenPscos);
            }
            writtenPscos.add(dataId);
            break;
        }
        default:
            // Nothing to do with basic types
            break;
    }

    if (DEBUG) {
        LOGGER.debug("New writer for datum " + dp.getDataAccessId().getDataId() + " is task " + taskId);
    }
}
/*
**************************************************************************************************************
* GRAPH WRAPPERS
**************************************************************************************************************/
/**
 * Registers a newly detected task in the graph: adds its node, stamps it with the current
 * synchronization id, and marks that a task has been created since the last synchronization point.
 *
 * @param task newly detected task
 */
private void addNewTask(Task task) {
    this.GM.addTaskToGraph(task);
    task.setSynchronizationId(this.synchronizationId);
    this.taskDetectedAfterSync = true;
}
/**
 * Adds a graph edge for a task-to-task data dependency. If producer and consumer belong to the same
 * synchronization epoch the edge starts at the producer task; otherwise it starts at the consumer's
 * synchronization node, where the data was materialized.
 *
 * @param source producer task
 * @param dest consumer task
 * @param dataId id of the datum creating the dependency
 * @param dataVersion version of the datum creating the dependency
 */
private void addEdgeFromTaskToTask(Task source, Task dest, int dataId, int dataVersion) {
    // Only the edge source differs between the two cases; dst and label were duplicated before
    String src;
    if (source.getSynchronizationId() == dest.getSynchronizationId()) {
        src = String.valueOf(source.getId());
    } else {
        src = "Synchro" + dest.getSynchronizationId();
    }
    String dst = String.valueOf(dest.getId());
    String dep = String.valueOf(dataId) + "v" + String.valueOf(dataVersion);
    this.GM.addEdgeToGraph(src, dst, dep);
}
/**
 * Adds a graph edge from the consumer task's synchronization node to the task itself, for data that has
 * no producer task and must come from the last synchronization point.
 *
 * @param dest consumer task
 * @param dataId id of the accessed datum
 * @param dataVersion version of the accessed datum
 */
private void addEdgeFromMainToTask(Task dest, int dataId, int dataVersion) {
    String source = "Synchro" + dest.getSynchronizationId();
    String target = String.valueOf(dest.getId());
    String label = dataId + "v" + dataVersion;
    this.GM.addEdgeToGraph(source, target, label);
}
/**
 * Records in the graph that the main code accessed data produced by {@code task}: opens a new
 * synchronization node if any task was created since the last one (chaining it to the previous sync
 * node), then adds an edge from the producer task to the current synchronization node labelled with the
 * datum id and version.
 *
 * @param task producer task whose datum is accessed from the main code
 * @param dataId id of the accessed datum
 * @param dataVersion version of the accessed datum
 */
private void addEdgeFromTaskToMain(Task task, int dataId, int dataVersion) {
    // Add Sync if any task has been created since the previous synchronization point
    if (this.taskDetectedAfterSync) {
        this.taskDetectedAfterSync = false;
        // NOTE: the increment must happen before addSynchroToGraph — the node is named after the new id
        int oldSyncId = this.synchronizationId;
        this.synchronizationId++;
        this.GM.addSynchroToGraph(this.synchronizationId);
        if (this.synchronizationId > 1) {
            // Chain the new sync node to the previous one
            String oldSync = "Synchro" + oldSyncId;
            String currentSync = "Synchro" + this.synchronizationId;
            this.GM.addEdgeToGraph(oldSync, currentSync, "");
        }
    }
    // Add edge from the producer task to the (possibly new) sync node
    String src = String.valueOf(task.getId());
    String dest = "Synchro" + this.synchronizationId;
    this.GM.addEdgeToGraph(src, dest, String.valueOf(dataId) + "v" + String.valueOf(dataVersion));
}
/**
 * Records an explicit barrier in the graph: adds a new synchronization node, links it to the previous
 * synchronization point, and adds edges from every task that is still a registered writer in the old
 * epoch to the barrier node.
 */
private void addNewBarrier() {
    // Add barrier node (increment first: the node is named after the new id)
    int oldSync = this.synchronizationId;
    this.synchronizationId++;
    this.taskDetectedAfterSync = false;
    this.GM.addBarrierToGraph(this.synchronizationId);
    // Add edge from last sync (skipped for the very first synchronization point)
    String newSync_str = "Synchro" + this.synchronizationId;
    String oldSync_str = "Synchro" + oldSync;
    if (this.synchronizationId > 1) {
        this.GM.addEdgeToGraph(oldSync_str, newSync_str, "");
    }
    // Add edges from writers to barrier; HashSet deduplicates tasks writing several data
    HashSet<Task> uniqueWriters = new HashSet<>(this.writers.values());
    for (Task writer : uniqueWriters) {
        // Only writers belonging to the epoch being closed feed the barrier
        if (writer != null && writer.getSynchronizationId() == oldSync) {
            String taskId = String.valueOf(writer.getId());
            this.GM.addEdgeToGraph(taskId, newSync_str, "");
        }
    }
}
}
| 37.547945
| 120
| 0.570791
|
698decc751a017cb1eab794a10196f25fa871905
| 1,853
|
package org.opencv.ml;
import org.opencv.core.*;
/**
 * Java binding for OpenCV's {@code cv::ml::StatModel}, the base class of all statistical
 * machine-learning models. Every public method delegates to a native (JNI) implementation operating on
 * the wrapped native object address ({@code nativeObj}).
 *
 * NOTE(review): this class appears to be auto-generated from the OpenCV C++ headers — prefer
 * regenerating over editing it by hand.
 */
public class StatModel extends Algorithm
{
    // Train/predict flags (mirror cv::ml::StatModel::Flags); RAW_OUTPUT and UPDATE_MODEL share value 1
    // because they apply to different calls (predict vs. train)
    public static final int COMPRESSED_INPUT = 2;
    public static final int PREPROCESSED_INPUT = 4;
    public static final int RAW_OUTPUT = 1;
    public static final int UPDATE_MODEL = 1;
    // Wraps an existing native cv::ml::StatModel located at address n
    protected StatModel(final long n) {
        super(n);
    }
    // Native bridges; the first long parameter is always the native object address
    private static native void delete(final long p0);
    private static native boolean empty_0(final long p0);
    private static native int getVarCount_0(final long p0);
    private static native boolean isClassifier_0(final long p0);
    private static native boolean isTrained_0(final long p0);
    private static native float predict_0(final long p0, final long p1, final long p2, final int p3);
    private static native float predict_1(final long p0, final long p1);
    private static native boolean train_0(final long p0, final long p1, final int p2, final long p3);
    /** @return true if the model is empty (not trained). */
    public boolean empty() {
        return empty_0(this.nativeObj);
    }
    // Releases the native object when this wrapper is garbage-collected.
    // NOTE(review): does not call super.finalize(); presumably intentional in generated bindings
    // (avoids a double native delete) — confirm against the OpenCV Java generator.
    @Override
    protected void finalize() throws Throwable {
        delete(this.nativeObj);
    }
    /** @return the number of variables in training samples. */
    public int getVarCount() {
        return getVarCount_0(this.nativeObj);
    }
    /** @return true if the model is a classifier (as opposed to a regressor). */
    public boolean isClassifier() {
        return isClassifier_0(this.nativeObj);
    }
    /** @return true if the model has been trained. */
    public boolean isTrained() {
        return isTrained_0(this.nativeObj);
    }
    /** Predicts the response for the given samples matrix. */
    public float predict(final Mat mat) {
        return predict_1(this.nativeObj, mat.nativeObj);
    }
    /** Predicts responses for the samples, writing results into mat2; n is a flags bitmask (e.g. RAW_OUTPUT). */
    public float predict(final Mat mat, final Mat mat2, final int n) {
        return predict_0(this.nativeObj, mat.nativeObj, mat2.nativeObj, n);
    }
    /** Trains the model on samples mat with layout n and responses mat2; returns true on success. */
    public boolean train(final Mat mat, final int n, final Mat mat2) {
        return train_0(this.nativeObj, mat.nativeObj, n, mat2.nativeObj);
    }
}
| 28.507692
| 101
| 0.662169
|
93a8bf7928580a9471c32245f4bf82485021f66a
| 7,268
|
package me.nullicorn.nedit;
import static me.nullicorn.nedit.IOTestHelper.TEST_BYTE;
import static me.nullicorn.nedit.IOTestHelper.TEST_DOUBLE;
import static me.nullicorn.nedit.IOTestHelper.TEST_FLOAT;
import static me.nullicorn.nedit.IOTestHelper.TEST_INT;
import static me.nullicorn.nedit.IOTestHelper.TEST_LONG;
import static me.nullicorn.nedit.IOTestHelper.TEST_SHORT;
import static me.nullicorn.nedit.IOTestHelper.TEST_STRING;
import static me.nullicorn.nedit.IOTestHelper.createTestByteArray;
import static me.nullicorn.nedit.IOTestHelper.createTestDoubleList;
import static me.nullicorn.nedit.IOTestHelper.createTestEmptyList;
import static me.nullicorn.nedit.IOTestHelper.createTestIntArray;
import static me.nullicorn.nedit.IOTestHelper.createTestLongArray;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import me.nullicorn.nedit.type.NBTCompound;
import me.nullicorn.nedit.type.TagType;
import org.junit.jupiter.api.Test;
/**
 * Unit tests for {@code NBTOutputStream}: each test encodes a value with the stream under test and
 * compares the produced bytes against a reference encoding written with a plain
 * {@link DataOutputStream}, following the NBT binary format.
 */
class NBTOutputStreamTests {
    // Primitive tags must match DataOutputStream's big-endian encodings (writeUTF for strings)
    @Test
    void shouldEncodePrimitivesCorrectly() throws IOException {
        tryWrite((int) TEST_BYTE, NBTOutputStream::writeByte, DataOutputStream::writeByte);
        tryWrite((int) TEST_SHORT, NBTOutputStream::writeShort, DataOutputStream::writeShort);
        tryWrite(TEST_INT, NBTOutputStream::writeInt, DataOutputStream::writeInt);
        tryWrite(TEST_LONG, NBTOutputStream::writeLong, DataOutputStream::writeLong);
        tryWrite(TEST_FLOAT, NBTOutputStream::writeFloat, DataOutputStream::writeFloat);
        tryWrite(TEST_DOUBLE, NBTOutputStream::writeDouble, DataOutputStream::writeDouble);
        tryWrite(TEST_STRING, NBTOutputStream::writeString, DataOutputStream::writeUTF);
    }
    // Arrays/lists encode a length (and, for lists, an element tag id) followed by the payload
    @Test
    void shouldEncodeIterablesCorrectly() throws IOException {
        tryWrite(createTestByteArray(), NBTOutputStream::writeByteArray, (out, array) -> {
            out.writeInt(array.length);
            out.write(array);
        });
        tryWrite(createTestIntArray(), NBTOutputStream::writeIntArray, (out, array) -> {
            out.writeInt(array.length);
            for (int value : array) {
                out.writeInt(value);
            }
        });
        tryWrite(createTestLongArray(), NBTOutputStream::writeLongArray, (out, array) -> {
            out.writeInt(array.length);
            for (long value : array) {
                out.writeLong(value);
            }
        });
        // An empty list uses TAG_End as its element type and a zero length
        tryWrite(createTestEmptyList(), NBTOutputStream::writeList, (out, list) -> {
            out.writeByte(TagType.END.getId());
            out.writeInt(0);
        });
        tryWrite(createTestDoubleList(), NBTOutputStream::writeList, (out, list) -> {
            out.writeByte(TagType.DOUBLE.getId());
            out.writeInt(list.size());
            for (int i = 0; i < list.size(); i++) {
                out.writeDouble(list.getDouble(i));
            }
        });
    }
    // An empty compound is just a single TAG_End byte
    @Test
    void shouldEncodeEmptyCompoundsCorrectly() throws IOException {
        tryWrite(new NBTCompound(), NBTOutputStream::writeCompound,
            (out, compound) -> out.writeByte(TagType.END.getId()));
    }
    // Round-trip style check: write a compound, then hand-decode the bytes and compare to the original.
    // Decoding by hand (instead of comparing byte arrays) keeps the test independent of tag order.
    @Test
    void shouldEncodeCompoundsCorrectly() throws IOException {
        NBTCompound expected = new NBTCompound();
        NBTCompound nested = new NBTCompound();
        nested.put("nested_string", TEST_STRING);
        expected.put("byte", TEST_BYTE);
        expected.put("int", TEST_INT);
        expected.put("string", TEST_STRING);
        expected.put("double", TEST_DOUBLE);
        expected.put("compound", nested);
        expected.put("byte_array", createTestByteArray());
        ByteArrayOutputStream bytesOut = new ByteArrayOutputStream();
        new NBTOutputStream(bytesOut, false).writeCompound(expected);
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytesOut.toByteArray()));
        int tagId;
        NBTCompound actual = new NBTCompound();
        // Each child is: tag id byte, UTF name, payload; TAG_End closes the compound
        while ((tagId = in.readByte()) != TagType.END.getId()) {
            assertNotEquals(-1, tagId, "Compound ended unexpectedly");
            TagType type = TagType.fromId(tagId);
            assertNotNull(type, "Child tag had an invalid type (id=" + tagId + ")");
            String name = in.readUTF();
            assertFalse(name.isEmpty(), "Unexpected empty tag name");
            assertTrue(expected.containsTag(name, type));
            Object value;
            switch (name) {
                case "byte":
                    value = in.readByte();
                    break;
                case "int":
                    value = in.readInt();
                    break;
                case "string":
                    value = in.readUTF();
                    break;
                case "double":
                    value = in.readDouble();
                    break;
                case "byte_array":
                    int length = in.readInt();
                    value = new byte[length];
                    in.readFully((byte[]) value);
                    break;
                case "compound":
                    int nestedTagId = in.readByte();
                    assertEquals(TagType.STRING.getId(), nestedTagId, "Incorrect nested tag ID");
                    assertEquals("nested_string", in.readUTF(), "Incorrect nested value");
                    assertEquals(TEST_STRING, in.readUTF());
                    assertEquals(TagType.END.getId(), in.readByte(), "Nested compound not closed");
                    // If those assertions pass ^, the nested value is known to be valid.
                    value = nested;
                    break;
                default:
                    throw new IOException("Unexpected tag \"" + name + "\" with ID " + tagId);
            }
            actual.put(name, value);
        }
        assertEquals(-1, in.read(), "Written compound continues unexpectedly");
        assertEquals(expected, actual);
    }
    /**
     * Encodes {@code value} twice — once with the stream under test, once with the reference writer —
     * and asserts both produce identical bytes. Compression is disabled so bytes compare directly.
     */
    private <T> void tryWrite(T value, NBTWriterFunction<T> actualWriter, WriterFunction<T> expectedWriter) throws IOException {
        ByteArrayOutputStream actualBytesOut = new ByteArrayOutputStream();
        ByteArrayOutputStream expectedBytesOut = new ByteArrayOutputStream();
        actualWriter.write(new NBTOutputStream(actualBytesOut, false), value);
        expectedWriter.write(new DataOutputStream(expectedBytesOut), value);
        assertArrayEquals(
            expectedBytesOut.toByteArray(),
            actualBytesOut.toByteArray(),
            "Incorrect encoding for " + value.getClass().getSimpleName());
    }
    // Writer taking the stream under test (may throw IOException from the underlying stream)
    private interface NBTWriterFunction<T> {
        void write(NBTOutputStream out, T value) throws IOException;
    }
    // Reference writer using a plain DataOutputStream
    private interface WriterFunction<T> {
        void write(DataOutputStream out, T value) throws IOException;
    }
}
| 41.295455
| 128
| 0.642267
|
08f3c1a1604177b769c66c38591e448a8b522b48
| 704
|
package sk.vander.contacts.ui;
import dagger.Module;
import dagger.Provides;
import sk.vander.contacts.base.ActivityHierarchyServer;
import sk.vander.contacts.base.AppContainer;
import sk.vander.contacts.base.annotation.ActivityScreenSwitcherServer;
import sk.vander.contacts.base.annotation.ApplicationScope;
/**
 * Dagger module with the release-build UI bindings: provides the default (no-op) {@link AppContainer}
 * and the activity-hierarchy server used for screen switching. Includes the shared {@code UiModule}.
 */
@Module(includes = UiModule.class)
public class ReleaseUiModule {
  // Release builds render into the stock content view (no debug drawer)
  @Provides @ApplicationScope AppContainer providesAppContainer() {
    return AppContainer.DEFAULT;
  }
  // Exposes the screen-switcher implementation under the generic ActivityHierarchyServer type
  @Provides @ApplicationScope
  ActivityHierarchyServer provideActivityHierarchyServer(@ActivityScreenSwitcherServer ActivityHierarchyServer server) {
    return server;
  }
}
| 28.16
| 120
| 0.809659
|
32bd8bc9d79ace1dc17e89a974d551e83e277d59
| 6,836
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.bullseye;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.geom.Point2D;
import java.awt.image.BufferedImage;
/**
 * Locates bullseye registration marks on a scanned page image. The constructor records the expected
 * (printed) positions of up to four corner marks plus optional minor marks interpolated vertically
 * between a top and bottom reference; {@link #scan} then searches the image for each mark in two passes
 * (a coarse pass on the four corners, then a refined pass using the calibration from pass one).
 *
 * Page coordinates are in hundredths of an inch; image coordinates are pixels.
 *
 * @author jon
 */
public class BullseyePageScanner
{
    // Indices into expectedpoints[] for the four corner marks; NONE marks a missing/derived origin
    public static final int TOPLEFT = 0;
    public static final int TOPRIGHT = 1;
    public static final int BOTTOMRIGHT = 2;
    public static final int BOTTOMLEFT = 3;
    public static final int NONE = -1;
    // Bullseye detector shared by both passes (radius reconfigured per pass)
    BullseyeLocator locator;
    // Page size and major-mark radius, in hundredths of an inch
    float width, height, radius;
    // vdiv: vertical divisions between top and bottom marks; minorcount = vdiv - 1 interpolated marks
    int vdiv, minorcount;
    // Radius of the minor (interpolated) marks
    float minradius;
    // Expected mark centres: [0..3] corners, [4..] interpolated minors
    Point2D.Float[] expectedpoints;
    // Chosen reference indices: vertical pair (topi/bottomi), horizontal pair (lefti/righti), origin
    int topi, bottomi, lefti, righti, origini;
    // Page origin: the TOPLEFT mark, or derived from BOTTOMLEFT.x / TOPRIGHT.y when absent
    Point2D.Float expectedorigin;
    /**
     * @param width page width in hundredths of inch
     * @param height page height in hundredths of inch
     * @param expectedpoints expected corner mark centres, indexed by TOPLEFT..BOTTOMLEFT (nulls allowed)
     * @param radius corner mark radius in hundredths of inch
     * @param subradii ring radii as proportions of radius
     * @param vdiv number of vertical divisions (>= 1) between top and bottom reference marks
     * @param minradius minor mark radius in hundredths of inch
     * @throws IllegalArgumentException when no origin or no top/bottom or left/right pair can be formed
     */
    public BullseyePageScanner(
        float width, float height,
        Point2D.Float[] expectedpoints,
        float radius, double[] subradii,
        int vdiv, float minradius
    )
    {
        this.width = width;
        this.height = height;
        this.radius = radius;
        this.minradius = minradius;
        this.vdiv = vdiv;
        if ( this.vdiv < 1 ) this.vdiv = 1;
        this.minorcount = this.vdiv-1;
        this.expectedpoints = new Point2D.Float[4+minorcount];
        // Defensive copy of the supplied corner points
        for ( int i=0; i<4; i++ )
        {
            if ( expectedpoints[i] != null )
                this.expectedpoints[i] = (Point2D.Float)expectedpoints[i].clone();
        }
        // Origin: prefer TOPLEFT; otherwise synthesize from BOTTOMLEFT.x and TOPRIGHT.y
        if ( this.expectedpoints[TOPLEFT] != null )
        {
            this.expectedorigin = this.expectedpoints[TOPLEFT];
            origini = TOPLEFT;
        }
        else
        {
            if ( this.expectedpoints[TOPRIGHT] == null || this.expectedpoints[BOTTOMLEFT] == null )
                throw new IllegalArgumentException("Unable to find origin of page.");
            this.expectedorigin = new Point.Float(this.expectedpoints[BOTTOMLEFT].x, this.expectedpoints[TOPRIGHT].y );
            origini= NONE;
        }
        // Vertical reference pair: two marks on the same side of the page
        if ( this.expectedpoints[TOPLEFT] != null && this.expectedpoints[BOTTOMLEFT] != null )
        {
            topi = TOPLEFT;
            bottomi = BOTTOMLEFT;
        }
        else if ( this.expectedpoints[TOPRIGHT] != null && this.expectedpoints[BOTTOMRIGHT] != null )
        {
            topi = TOPRIGHT;
            bottomi = BOTTOMRIGHT;
        }
        else
            throw new IllegalArgumentException("Needs a top and bottom bullseye on same side of page.");
        // Horizontal reference pair: two marks on the same edge of the page
        if ( this.expectedpoints[TOPLEFT] != null && this.expectedpoints[TOPRIGHT] != null )
        {
            lefti = TOPLEFT;
            righti = TOPRIGHT;
        }
        else if ( this.expectedpoints[BOTTOMLEFT] != null && this.expectedpoints[BOTTOMRIGHT] != null )
        {
            lefti = BOTTOMLEFT;
            righti = BOTTOMRIGHT;
        }
        else
            throw new IllegalArgumentException("Needs a left and right bullseye on same side of page.");
        float dv = this.expectedpoints[bottomi].y - this.expectedpoints[topi].y;
        for ( int i=4; i<this.expectedpoints.length; i++ )
        {
            // interpolate between points top and bottom references
            this.expectedpoints[i] = new Point2D.Float( this.expectedpoints[topi].x, this.expectedpoints[topi].y );
            this.expectedpoints[i].y += dv*(i-3)/this.vdiv;
        }
        locator = new BullseyeLocator(null, radius, subradii);
    }
    /**
     * Searches {@code image} for every expected bullseye in two passes (coarse corners, then refined
     * all-marks using the pass-one calibration) and returns the populated {@link BullseyePage}. The
     * returned page has {@code failed == true} when a required corner mark could not be located.
     *
     * @param image scanned page image
     * @return page result with located mark centres, search areas and debug images
     */
    public BullseyePage scan( BufferedImage image )
    {
        int i, pass;
        float passradius;
        BullseyePage bpage = new BullseyePage();
        Point[] points;
        bpage.minorcount = minorcount;
        bpage.bullseyepointsprint = new Point[expectedpoints.length];
        bpage.bullseyepointsscan = new Point[expectedpoints.length];
        bpage.searchareas = new Rectangle[2][expectedpoints.length];
        bpage.searchimages = new BufferedImage[2][expectedpoints.length];
        bpage.votemapimages = new BufferedImage[2][expectedpoints.length];
        bpage.topi = topi;
        bpage.bottomi = bottomi;
        bpage.lefti = lefti;
        bpage.righti = righti;
        bpage.origini = origini;
        bpage.pagebounds = new Rectangle( 0, 0, image.getWidth(), image.getHeight() );
        // First estimate of pixels-per-hundredth-inch, from image width vs. page width
        bpage.roughscale = (float)image.getWidth() / width;
        for ( i=0; i<expectedpoints.length; i++ )
        {
            if ( expectedpoints[i] != null )
                bpage.bullseyepointsprint[i] = new Point( Math.round(expectedpoints[i].x), Math.round(expectedpoints[i].y) );
        }
        Point passonepoint = new Point();
        for ( pass=0; pass<=1; pass++ )
        {
            System.out.println( "Page Scanner Pass " + pass );
            // Pass 0 searches only the four corners; pass 1 searches all marks (incl. minors)
            for ( i=0; i<((pass==0)?4:expectedpoints.length); i++ )
            {
                System.out.println( "Page Scanner Point " + i );
                if ( expectedpoints[i] == null ) continue;
                passradius = (i<4)?radius:minradius;
                if ( pass == 0 )
                    bpage.searchareas[pass][i] = new Rectangle(
                        (int)Math.round(expectedpoints[i].x * bpage.roughscale),
                        (int)Math.round(expectedpoints[i].y * bpage.roughscale),
                        0, 0 );
                else
                {
                    // Use current three reference points to refine search areas
                    bpage.toImageCoordinates(expectedpoints[i].x - expectedorigin.x, expectedpoints[i].y - expectedorigin.y, passonepoint, false);
                    bpage.searchareas[pass][i] = new Rectangle( passonepoint.x, passonepoint.y, 0, 0 );
                }
                // Coarse pass uses a 4x radius search window; refined pass tightens to 1.5x
                bpage.searchareas[pass][i].grow( (int)Math.round(passradius*(pass==0?4f:1.5f)*bpage.roughscale),
                    (int)Math.round(passradius*(pass==0?4f:1.5f)*bpage.roughscale) );
                bpage.searchareas[pass][i] = bpage.searchareas[pass][i].intersection( bpage.pagebounds );
                bpage.searchimages[pass][i] = image.getSubimage(
                    bpage.searchareas[pass][i].x,
                    bpage.searchareas[pass][i].y,
                    bpage.searchareas[pass][i].width,
                    bpage.searchareas[pass][i].height );
                locator.setEstimatedRadius(passradius*bpage.roughscale);
                locator.setInputImage( bpage.searchimages[pass][i] );
                points = locator.locateBullseye();
                bpage.votemapimages[pass][i] = locator.getVoteMapImage();
                bpage.bullseyepointsscan[i] = null;
                if ( points.length == 1 )
                    // Translate from sub-image back to full-image coordinates
                    bpage.bullseyepointsscan[i] = new Point(
                        points[0].x + bpage.searchareas[pass][i].x,
                        points[0].y + bpage.searchareas[pass][i].y );
                else if ( i<3 )
                    // A missing required corner aborts the scan (bpage.failed stays true)
                    return bpage;
            }
            // rescale after pass 0
            // to get better bullseye centres
            // after pass 1 to establish calibration
            bpage.calibrate( pass == 1 );
        }
        bpage.failed = false;
        return bpage;
    }
}
| 35.05641
| 136
| 0.618637
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.