language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/EKS2EndpointBuilderFactory.java
|
{
"start": 15790,
"end": 19298
}
|
interface ____
extends
EndpointProducerBuilder {
default EKS2EndpointBuilder basic() {
return (EKS2EndpointBuilder) this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedEKS2EndpointBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedEKS2EndpointBuilder lazyStartProducer(String lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* To use an existing configured AWS EKS client.
*
* The option is a:
* <code>software.amazon.awssdk.services.eks.EksClient</code> type.
*
* Group: advanced
*
* @param eksClient the value to set
* @return the dsl builder
*/
default AdvancedEKS2EndpointBuilder eksClient(software.amazon.awssdk.services.eks.EksClient eksClient) {
doSetProperty("eksClient", eksClient);
return this;
}
/**
* To use an existing configured AWS EKS client.
*
* The option will be converted to a
* <code>software.amazon.awssdk.services.eks.EksClient</code> type.
*
* Group: advanced
*
* @param eksClient the value to set
* @return the dsl builder
*/
default AdvancedEKS2EndpointBuilder eksClient(String eksClient) {
doSetProperty("eksClient", eksClient);
return this;
}
}
public
|
AdvancedEKS2EndpointBuilder
|
java
|
google__guava
|
android/guava/src/com/google/common/reflect/TypeToken.java
|
{
"start": 13345,
"end": 14477
}
|
class ____ T
TypeToken<? super T> superToken = (TypeToken<? super T>) resolveSupertype(superclass);
return superToken;
}
private @Nullable TypeToken<? super T> boundAsSuperclass(Type bound) {
TypeToken<?> token = of(bound);
if (token.getRawType().isInterface()) {
return null;
}
@SuppressWarnings("unchecked") // only upper bound of T is passed in.
TypeToken<? super T> superclass = (TypeToken<? super T>) token;
return superclass;
}
/**
* Returns the generic interfaces that this type directly {@code implements}. This method is
* similar but different from {@link Class#getGenericInterfaces()}. For example, {@code new
* TypeToken<List<String>>() {}.getGenericInterfaces()} will return a list that contains {@code
* new TypeToken<Iterable<String>>() {}}; while {@code List.class.getGenericInterfaces()} will
* return an array that contains {@code Iterable<T>}, where the {@code T} is the type variable
* declared by interface {@code Iterable}.
*
* <p>If this type is a type variable or wildcard, its upper bounds are examined and those that
* are either an
|
of
|
java
|
apache__camel
|
components/camel-cxf/camel-cxf-spring-soap/src/test/java/org/apache/camel/component/cxf/wsdl/OrderTest.java
|
{
"start": 1346,
"end": 1488
}
|
class ____ extends CamelSpringTestSupport {
@BeforeAll
public static void loadTestSupport() {
// Need to load the static
|
OrderTest
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/adaptivebatch/DefaultVertexParallelismAndInputInfosDeciderTest.java
|
{
"start": 2086,
"end": 27023
}
|
class ____ {
private static final long BYTE_256_MB = 256 * 1024 * 1024L;
private static final long BYTE_512_MB = 512 * 1024 * 1024L;
private static final long BYTE_1_GB = 1024 * 1024 * 1024L;
private static final long BYTE_8_GB = 8 * 1024 * 1024 * 1024L;
private static final long BYTE_1_TB = 1024 * 1024 * 1024 * 1024L;
private static final int MAX_PARALLELISM = 100;
private static final int MIN_PARALLELISM = 3;
private static final int VERTEX_MAX_PARALLELISM = 256;
private static final int DEFAULT_SOURCE_PARALLELISM = 10;
private static final long DATA_VOLUME_PER_TASK = 1024 * 1024 * 1024L;
@Test
void testDecideParallelism() {
BlockingResultInfo resultInfo1 = createFromBroadcastResult(BYTE_256_MB);
BlockingResultInfo resultInfo2 = createFromNonBroadcastResult(BYTE_256_MB + BYTE_8_GB);
int parallelism =
createDeciderAndDecideParallelism(Arrays.asList(resultInfo1, resultInfo2));
assertThat(parallelism).isEqualTo(9);
}
@Test
void testInitiallyNormalizedParallelismIsLargerThanMaxParallelism() {
BlockingResultInfo resultInfo1 = createFromBroadcastResult(BYTE_256_MB);
BlockingResultInfo resultInfo2 = createFromNonBroadcastResult(BYTE_8_GB + BYTE_1_TB);
int parallelism =
createDeciderAndDecideParallelism(Arrays.asList(resultInfo1, resultInfo2));
assertThat(parallelism).isEqualTo(MAX_PARALLELISM);
}
@Test
void testInitiallyNormalizedParallelismIsSmallerThanMinParallelism() {
BlockingResultInfo resultInfo1 = createFromBroadcastResult(BYTE_256_MB);
BlockingResultInfo resultInfo2 = createFromNonBroadcastResult(BYTE_512_MB);
int parallelism =
createDeciderAndDecideParallelism(Arrays.asList(resultInfo1, resultInfo2));
assertThat(parallelism).isEqualTo(MIN_PARALLELISM);
}
@Test
void testNonBroadcastBytesCanNotDividedEvenly() {
BlockingResultInfo resultInfo1 = createFromBroadcastResult(BYTE_512_MB);
BlockingResultInfo resultInfo2 = createFromNonBroadcastResult(BYTE_256_MB + BYTE_8_GB);
int parallelism =
createDeciderAndDecideParallelism(Arrays.asList(resultInfo1, resultInfo2));
assertThat(parallelism).isEqualTo(9);
}
@Test
void testAllEdgesAllToAll() {
AllToAllBlockingResultInfo resultInfo1 =
createAllToAllBlockingResultInfo(
new long[] {10L, 15L, 13L, 12L, 1L, 10L, 8L, 20L, 12L, 17L});
AllToAllBlockingResultInfo resultInfo2 =
createAllToAllBlockingResultInfo(
new long[] {8L, 12L, 21L, 9L, 13L, 7L, 19L, 13L, 14L, 5L});
ParallelismAndInputInfos parallelismAndInputInfos =
createDeciderAndDecideParallelismAndInputInfos(
1, 10, 60L, Arrays.asList(resultInfo1, resultInfo2));
assertThat(parallelismAndInputInfos.getParallelism()).isEqualTo(5);
assertThat(parallelismAndInputInfos.getJobVertexInputInfos()).hasSize(2);
List<IndexRange> subpartitionRanges =
Arrays.asList(
new IndexRange(0, 1),
new IndexRange(2, 3),
new IndexRange(4, 6),
new IndexRange(7, 8),
new IndexRange(9, 9));
checkAllToAllJobVertexInputInfo(
parallelismAndInputInfos.getJobVertexInputInfos().get(resultInfo1.getResultId()),
subpartitionRanges);
checkAllToAllJobVertexInputInfo(
parallelismAndInputInfos.getJobVertexInputInfos().get(resultInfo2.getResultId()),
subpartitionRanges);
}
@Test
void testAllEdgesAllToAllAndDecidedParallelismIsMaxParallelism() {
AllToAllBlockingResultInfo resultInfo =
createAllToAllBlockingResultInfo(
new long[] {10L, 15L, 13L, 12L, 1L, 10L, 8L, 20L, 12L, 17L});
ParallelismAndInputInfos parallelismAndInputInfos =
createDeciderAndDecideParallelismAndInputInfos(
1, 2, 10L, Collections.singletonList(resultInfo));
assertThat(parallelismAndInputInfos.getParallelism()).isEqualTo(2);
assertThat(parallelismAndInputInfos.getJobVertexInputInfos()).hasSize(1);
checkAllToAllJobVertexInputInfo(
Iterables.getOnlyElement(
parallelismAndInputInfos.getJobVertexInputInfos().values()),
Arrays.asList(new IndexRange(0, 5), new IndexRange(6, 9)));
}
@Test
void testAllEdgesAllToAllAndDecidedParallelismIsMinParallelism() {
AllToAllBlockingResultInfo resultInfo =
createAllToAllBlockingResultInfo(
new long[] {10L, 15L, 13L, 12L, 1L, 10L, 8L, 20L, 12L, 17L});
ParallelismAndInputInfos parallelismAndInputInfos =
createDeciderAndDecideParallelismAndInputInfos(
4, 10, 1000L, Collections.singletonList(resultInfo));
assertThat(parallelismAndInputInfos.getParallelism()).isEqualTo(4);
assertThat(parallelismAndInputInfos.getJobVertexInputInfos()).hasSize(1);
checkAllToAllJobVertexInputInfo(
Iterables.getOnlyElement(
parallelismAndInputInfos.getJobVertexInputInfos().values()),
Arrays.asList(
new IndexRange(0, 1),
new IndexRange(2, 5),
new IndexRange(6, 7),
new IndexRange(8, 9)));
}
@Test
void testFallBackToEvenlyDistributeSubpartitions() {
AllToAllBlockingResultInfo resultInfo =
createAllToAllBlockingResultInfo(
new long[] {10L, 1L, 10L, 1L, 10L, 1L, 10L, 1L, 10L, 1L});
ParallelismAndInputInfos parallelismAndInputInfos =
createDeciderAndDecideParallelismAndInputInfos(
8, 8, 10L, Collections.singletonList(resultInfo));
assertThat(parallelismAndInputInfos.getParallelism()).isEqualTo(8);
assertThat(parallelismAndInputInfos.getJobVertexInputInfos()).hasSize(1);
checkAllToAllJobVertexInputInfo(
Iterables.getOnlyElement(
parallelismAndInputInfos.getJobVertexInputInfos().values()),
Arrays.asList(
new IndexRange(0, 0),
new IndexRange(1, 1),
new IndexRange(2, 2),
new IndexRange(3, 4),
new IndexRange(5, 5),
new IndexRange(6, 6),
new IndexRange(7, 7),
new IndexRange(8, 9)));
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
void testAllEdgesAllToAllAndOneIsBroadcast(boolean singleSubpartitionContainsAllData) {
AllToAllBlockingResultInfo resultInfo1 =
createAllToAllBlockingResultInfo(
new long[] {10L, 15L, 13L, 12L, 1L, 10L, 8L, 20L, 12L, 17L}, false, false);
AllToAllBlockingResultInfo resultInfo2 =
createAllToAllBlockingResultInfo(
singleSubpartitionContainsAllData
? new long[] {10L}
: new long[] {1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L},
true,
singleSubpartitionContainsAllData);
ParallelismAndInputInfos parallelismAndInputInfos =
createDeciderAndDecideParallelismAndInputInfos(
1, 10, 60L, Arrays.asList(resultInfo1, resultInfo2));
assertThat(parallelismAndInputInfos.getParallelism()).isEqualTo(3);
assertThat(parallelismAndInputInfos.getJobVertexInputInfos()).hasSize(2);
checkAllToAllJobVertexInputInfo(
parallelismAndInputInfos.getJobVertexInputInfos().get(resultInfo1.getResultId()),
Arrays.asList(new IndexRange(0, 4), new IndexRange(5, 8), new IndexRange(9, 9)));
if (singleSubpartitionContainsAllData) {
checkAllToAllJobVertexInputInfo(
parallelismAndInputInfos
.getJobVertexInputInfos()
.get(resultInfo2.getResultId()),
Arrays.asList(
new IndexRange(0, 0), new IndexRange(0, 0), new IndexRange(0, 0)));
} else {
checkAllToAllJobVertexInputInfo(
parallelismAndInputInfos
.getJobVertexInputInfos()
.get(resultInfo2.getResultId()),
Arrays.asList(
new IndexRange(0, 9), new IndexRange(0, 9), new IndexRange(0, 9)));
}
}
@Test
void testAllEdgesBroadcast() {
AllToAllBlockingResultInfo resultInfo1;
AllToAllBlockingResultInfo resultInfo2;
resultInfo1 = createAllToAllBlockingResultInfo(new long[] {10L}, true, false);
resultInfo2 = createAllToAllBlockingResultInfo(new long[] {10L}, true, false);
ParallelismAndInputInfos parallelismAndInputInfos =
createDeciderAndDecideParallelismAndInputInfos(
1, 10, 60L, Arrays.asList(resultInfo1, resultInfo2));
assertThat(parallelismAndInputInfos.getParallelism()).isOne();
assertThat(parallelismAndInputInfos.getJobVertexInputInfos()).hasSize(2);
List<IndexRange> expectedSubpartitionRanges =
Collections.singletonList(new IndexRange(0, 0));
checkAllToAllJobVertexInputInfo(
parallelismAndInputInfos.getJobVertexInputInfos().get(resultInfo1.getResultId()),
expectedSubpartitionRanges);
checkAllToAllJobVertexInputInfo(
parallelismAndInputInfos.getJobVertexInputInfos().get(resultInfo2.getResultId()),
expectedSubpartitionRanges);
}
@Test
void testHavePointwiseEdges() {
AllToAllBlockingResultInfo resultInfo1 =
createAllToAllBlockingResultInfo(
new long[] {10L, 15L, 13L, 12L, 1L, 10L, 8L, 20L, 12L, 17L});
PointwiseBlockingResultInfo resultInfo2 =
createPointwiseBlockingResultInfo(
new long[] {8L, 12L, 21L, 9L, 13L}, new long[] {7L, 19L, 13L, 14L, 5L});
ParallelismAndInputInfos parallelismAndInputInfos =
createDeciderAndDecideParallelismAndInputInfos(
1, 10, 60L, Arrays.asList(resultInfo1, resultInfo2));
assertThat(parallelismAndInputInfos.getParallelism()).isEqualTo(4);
assertThat(parallelismAndInputInfos.getJobVertexInputInfos()).hasSize(2);
checkAllToAllJobVertexInputInfo(
parallelismAndInputInfos.getJobVertexInputInfos().get(resultInfo1.getResultId()),
Arrays.asList(
new IndexRange(0, 1),
new IndexRange(2, 5),
new IndexRange(6, 7),
new IndexRange(8, 9)));
checkJobVertexInputInfo(
parallelismAndInputInfos.getJobVertexInputInfos().get(resultInfo2.getResultId()),
Arrays.asList(
Map.of(new IndexRange(0, 0), new IndexRange(0, 1)),
Map.of(new IndexRange(0, 0), new IndexRange(2, 3)),
Map.of(
new IndexRange(0, 0),
new IndexRange(4, 4),
new IndexRange(1, 1),
new IndexRange(0, 1)),
Map.of(new IndexRange(1, 1), new IndexRange(2, 4))));
}
@Test
void testHavePointwiseAndBroadcastEdge() {
AllToAllBlockingResultInfo resultInfo1 =
createAllToAllBlockingResultInfo(
new long[] {10L, 15L, 13L, 12L, 1L, 10L, 8L, 20L, 12L, 17L}, true, false);
PointwiseBlockingResultInfo resultInfo2 =
createPointwiseBlockingResultInfo(
new long[] {8L, 12L, 21L, 9L, 13L}, new long[] {7L, 19L, 13L, 14L, 5L});
ParallelismAndInputInfos parallelismAndInputInfos =
createDeciderAndDecideParallelismAndInputInfos(
1, 10, 60L, Arrays.asList(resultInfo1, resultInfo2));
assertThat(parallelismAndInputInfos.getParallelism()).isEqualTo(6);
assertThat(parallelismAndInputInfos.getJobVertexInputInfos()).hasSize(2);
checkAllToAllJobVertexInputInfo(
parallelismAndInputInfos.getJobVertexInputInfos().get(resultInfo1.getResultId()),
Arrays.asList(
new IndexRange(0, 9),
new IndexRange(0, 9),
new IndexRange(0, 9),
new IndexRange(0, 9),
new IndexRange(0, 9),
new IndexRange(0, 9)));
checkJobVertexInputInfo(
parallelismAndInputInfos.getJobVertexInputInfos().get(resultInfo2.getResultId()),
Arrays.asList(
Map.of(new IndexRange(0, 0), new IndexRange(0, 1)),
Map.of(new IndexRange(0, 0), new IndexRange(2, 3)),
Map.of(
new IndexRange(0, 0),
new IndexRange(4, 4),
new IndexRange(1, 1),
new IndexRange(0, 0)),
Map.of(new IndexRange(1, 1), new IndexRange(1, 1)),
Map.of(new IndexRange(1, 1), new IndexRange(2, 3)),
Map.of(new IndexRange(1, 1), new IndexRange(4, 4))));
}
@Test
void testSourceJobVertex() {
ParallelismAndInputInfos parallelismAndInputInfos =
createDeciderAndDecideParallelismAndInputInfos(
MIN_PARALLELISM,
MAX_PARALLELISM,
DATA_VOLUME_PER_TASK,
Collections.emptyList());
assertThat(parallelismAndInputInfos.getParallelism()).isEqualTo(DEFAULT_SOURCE_PARALLELISM);
assertThat(parallelismAndInputInfos.getJobVertexInputInfos()).isEmpty();
}
@Test
void testDynamicSourceParallelismWithUpstreamInputs() {
final DefaultVertexParallelismAndInputInfosDecider decider =
createDecider(MIN_PARALLELISM, MAX_PARALLELISM, DATA_VOLUME_PER_TASK);
AllToAllBlockingResultInfo allToAllBlockingResultInfo =
createAllToAllBlockingResultInfo(
new long[] {10L, 15L, 13L, 12L, 1L, 10L, 8L, 20L, 12L, 17L});
int dynamicSourceParallelism = 4;
ParallelismAndInputInfos parallelismAndInputInfos =
decider.decideParallelismAndInputInfosForVertex(
new JobVertexID(),
Collections.singletonList(
toBlockingInputInfoView(allToAllBlockingResultInfo)),
-1,
dynamicSourceParallelism,
MAX_PARALLELISM);
assertThat(parallelismAndInputInfos.getParallelism()).isEqualTo(4);
assertThat(parallelismAndInputInfos.getJobVertexInputInfos()).hasSize(1);
checkAllToAllJobVertexInputInfo(
Iterables.getOnlyElement(
parallelismAndInputInfos.getJobVertexInputInfos().values()),
Arrays.asList(
new IndexRange(0, 1),
new IndexRange(2, 5),
new IndexRange(6, 7),
new IndexRange(8, 9)));
}
@Test
void testComputeSourceParallelismUpperBound() {
Configuration configuration = new Configuration();
configuration.set(
BatchExecutionOptions.ADAPTIVE_AUTO_PARALLELISM_DEFAULT_SOURCE_PARALLELISM,
DEFAULT_SOURCE_PARALLELISM);
VertexParallelismAndInputInfosDecider vertexParallelismAndInputInfosDecider =
createDefaultVertexParallelismAndInputInfosDecider(MAX_PARALLELISM, configuration);
assertThat(
vertexParallelismAndInputInfosDecider.computeSourceParallelismUpperBound(
new JobVertexID(), VERTEX_MAX_PARALLELISM))
.isEqualTo(DEFAULT_SOURCE_PARALLELISM);
}
@Test
void testComputeSourceParallelismUpperBoundFallback() {
Configuration configuration = new Configuration();
VertexParallelismAndInputInfosDecider vertexParallelismAndInputInfosDecider =
createDefaultVertexParallelismAndInputInfosDecider(MAX_PARALLELISM, configuration);
assertThat(
vertexParallelismAndInputInfosDecider.computeSourceParallelismUpperBound(
new JobVertexID(), VERTEX_MAX_PARALLELISM))
.isEqualTo(MAX_PARALLELISM);
}
@Test
void testComputeSourceParallelismUpperBoundNotExceedMaxParallelism() {
Configuration configuration = new Configuration();
configuration.set(
BatchExecutionOptions.ADAPTIVE_AUTO_PARALLELISM_DEFAULT_SOURCE_PARALLELISM,
VERTEX_MAX_PARALLELISM * 2);
VertexParallelismAndInputInfosDecider vertexParallelismAndInputInfosDecider =
createDefaultVertexParallelismAndInputInfosDecider(MAX_PARALLELISM, configuration);
assertThat(
vertexParallelismAndInputInfosDecider.computeSourceParallelismUpperBound(
new JobVertexID(), VERTEX_MAX_PARALLELISM))
.isEqualTo(VERTEX_MAX_PARALLELISM);
}
private static void checkAllToAllJobVertexInputInfo(
JobVertexInputInfo jobVertexInputInfo, List<IndexRange> subpartitionRanges) {
checkAllToAllJobVertexInputInfo(
jobVertexInputInfo, new IndexRange(0, 0), subpartitionRanges);
}
private static void checkAllToAllJobVertexInputInfo(
JobVertexInputInfo jobVertexInputInfo,
IndexRange indexRange,
List<IndexRange> subpartitionRanges) {
List<ExecutionVertexInputInfo> executionVertexInputInfos = new ArrayList<>();
for (int i = 0; i < subpartitionRanges.size(); ++i) {
executionVertexInputInfos.add(
new ExecutionVertexInputInfo(i, indexRange, subpartitionRanges.get(i)));
}
assertThat(jobVertexInputInfo.getExecutionVertexInputInfos())
.containsExactlyInAnyOrderElementsOf(executionVertexInputInfos);
}
private static void checkJobVertexInputInfo(
JobVertexInputInfo jobVertexInputInfo,
List<Map<IndexRange, IndexRange>> consumedSubpartitionGroups) {
List<ExecutionVertexInputInfo> executionVertexInputInfos = new ArrayList<>();
for (int i = 0; i < consumedSubpartitionGroups.size(); ++i) {
executionVertexInputInfos.add(
new ExecutionVertexInputInfo(i, consumedSubpartitionGroups.get(i)));
}
assertThat(jobVertexInputInfo.getExecutionVertexInputInfos())
.containsExactlyInAnyOrderElementsOf(executionVertexInputInfos);
}
static DefaultVertexParallelismAndInputInfosDecider createDecider(
int minParallelism, int maxParallelism, long dataVolumePerTask) {
return createDecider(
minParallelism, maxParallelism, dataVolumePerTask, DEFAULT_SOURCE_PARALLELISM);
}
static DefaultVertexParallelismAndInputInfosDecider createDecider(
int minParallelism,
int maxParallelism,
long dataVolumePerTask,
int defaultSourceParallelism) {
Configuration configuration = new Configuration();
configuration.set(
BatchExecutionOptions.ADAPTIVE_AUTO_PARALLELISM_MIN_PARALLELISM, minParallelism);
configuration.set(
BatchExecutionOptions.ADAPTIVE_AUTO_PARALLELISM_AVG_DATA_VOLUME_PER_TASK,
new MemorySize(dataVolumePerTask));
configuration.set(
BatchExecutionOptions.ADAPTIVE_AUTO_PARALLELISM_DEFAULT_SOURCE_PARALLELISM,
defaultSourceParallelism);
return createDefaultVertexParallelismAndInputInfosDecider(maxParallelism, configuration);
}
static DefaultVertexParallelismAndInputInfosDecider
createDefaultVertexParallelismAndInputInfosDecider(
int maxParallelism, Configuration configuration) {
return DefaultVertexParallelismAndInputInfosDecider.from(
maxParallelism,
BatchExecutionOptionsInternal.ADAPTIVE_SKEWED_OPTIMIZATION_SKEWED_FACTOR
.defaultValue(),
BatchExecutionOptionsInternal.ADAPTIVE_SKEWED_OPTIMIZATION_SKEWED_THRESHOLD
.defaultValue()
.getBytes(),
configuration);
}
private static int createDeciderAndDecideParallelism(List<BlockingResultInfo> consumedResults) {
return createDeciderAndDecideParallelism(
MIN_PARALLELISM, MAX_PARALLELISM, DATA_VOLUME_PER_TASK, consumedResults);
}
private static int createDeciderAndDecideParallelism(
int minParallelism,
int maxParallelism,
long dataVolumePerTask,
List<BlockingResultInfo> consumedResults) {
final DefaultVertexParallelismAndInputInfosDecider decider =
createDecider(minParallelism, maxParallelism, dataVolumePerTask);
return decider.decideParallelism(
new JobVertexID(),
toBlockingInputInfoViews(consumedResults),
minParallelism,
maxParallelism);
}
private static ParallelismAndInputInfos createDeciderAndDecideParallelismAndInputInfos(
int minParallelism,
int maxParallelism,
long dataVolumePerTask,
List<BlockingResultInfo> consumedResults) {
final DefaultVertexParallelismAndInputInfosDecider decider =
createDecider(minParallelism, maxParallelism, dataVolumePerTask);
return decider.decideParallelismAndInputInfosForVertex(
new JobVertexID(),
toBlockingInputInfoViews(consumedResults),
-1,
minParallelism,
maxParallelism);
}
private AllToAllBlockingResultInfo createAllToAllBlockingResultInfo(
long[] aggregatedSubpartitionBytes) {
return createAllToAllBlockingResultInfo(aggregatedSubpartitionBytes, false, false);
}
private AllToAllBlockingResultInfo createAllToAllBlockingResultInfo(
long[] aggregatedSubpartitionBytes,
boolean isBroadcast,
boolean isSingleSubpartitionContainsAllData) {
// For simplicity, we configure only one partition here, so the aggregatedSubpartitionBytes
// is equivalent to the subpartition bytes of partition0
AllToAllBlockingResultInfo resultInfo =
new AllToAllBlockingResultInfo(
new IntermediateDataSetID(),
1,
aggregatedSubpartitionBytes.length,
isBroadcast,
isSingleSubpartitionContainsAllData);
resultInfo.recordPartitionInfo(0, new ResultPartitionBytes(aggregatedSubpartitionBytes));
return resultInfo;
}
private PointwiseBlockingResultInfo createPointwiseBlockingResultInfo(
long[]... subpartitionBytesByPartition) {
final Set<Integer> subpartitionNumSet =
Arrays.stream(subpartitionBytesByPartition)
.map(array -> array.length)
.collect(Collectors.toSet());
// all partitions have the same subpartition num
checkState(subpartitionNumSet.size() == 1);
int numSubpartitions = subpartitionNumSet.iterator().next();
int numPartitions = subpartitionBytesByPartition.length;
PointwiseBlockingResultInfo resultInfo =
new PointwiseBlockingResultInfo(
new IntermediateDataSetID(), numPartitions, numSubpartitions);
int partitionIndex = 0;
for (long[] subpartitionBytes : subpartitionBytesByPartition) {
resultInfo.recordPartitionInfo(
partitionIndex++, new ResultPartitionBytes(subpartitionBytes));
}
return resultInfo;
}
private static
|
DefaultVertexParallelismAndInputInfosDeciderTest
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adapter/DefaultSchedulingPipelinedRegion.java
|
{
"start": 1583,
"end": 5280
}
|
class ____ implements SchedulingPipelinedRegion {
private final Map<ExecutionVertexID, DefaultExecutionVertex> executionVertices;
private Set<ConsumedPartitionGroup> nonPipelinedConsumedPartitionGroups;
private Set<ConsumedPartitionGroup> releaseBySchedulerConsumedPartitionGroups;
private final Function<IntermediateResultPartitionID, DefaultResultPartition>
resultPartitionRetriever;
public DefaultSchedulingPipelinedRegion(
Set<DefaultExecutionVertex> defaultExecutionVertices,
Function<IntermediateResultPartitionID, DefaultResultPartition>
resultPartitionRetriever) {
Preconditions.checkNotNull(defaultExecutionVertices);
this.executionVertices = new HashMap<>();
for (DefaultExecutionVertex executionVertex : defaultExecutionVertices) {
this.executionVertices.put(executionVertex.getId(), executionVertex);
}
this.resultPartitionRetriever = checkNotNull(resultPartitionRetriever);
}
@Override
public Iterable<DefaultExecutionVertex> getVertices() {
return Collections.unmodifiableCollection(executionVertices.values());
}
@Override
public DefaultExecutionVertex getVertex(final ExecutionVertexID vertexId) {
final DefaultExecutionVertex executionVertex = executionVertices.get(vertexId);
if (executionVertex == null) {
throw new IllegalArgumentException(
String.format("Execution vertex %s not found in pipelined region", vertexId));
}
return executionVertex;
}
private void initializeConsumedPartitionGroups() {
final Set<ConsumedPartitionGroup> nonPipelinedConsumedPartitionGroupSet = new HashSet<>();
final Set<ConsumedPartitionGroup> releaseBySchedulerConsumedPartitionGroupSet =
new HashSet<>();
for (DefaultExecutionVertex executionVertex : executionVertices.values()) {
for (ConsumedPartitionGroup consumedPartitionGroup :
executionVertex.getConsumedPartitionGroups()) {
SchedulingResultPartition consumedPartition =
resultPartitionRetriever.apply(consumedPartitionGroup.getFirst());
if (!consumedPartition.getResultType().mustBePipelinedConsumed()) {
nonPipelinedConsumedPartitionGroupSet.add(consumedPartitionGroup);
}
if (consumedPartition.getResultType().isReleaseByScheduler()) {
releaseBySchedulerConsumedPartitionGroupSet.add(consumedPartitionGroup);
}
}
}
this.nonPipelinedConsumedPartitionGroups =
Collections.unmodifiableSet(nonPipelinedConsumedPartitionGroupSet);
this.releaseBySchedulerConsumedPartitionGroups =
Collections.unmodifiableSet(releaseBySchedulerConsumedPartitionGroupSet);
}
@Override
public Iterable<ConsumedPartitionGroup> getAllNonPipelinedConsumedPartitionGroups() {
if (nonPipelinedConsumedPartitionGroups == null) {
initializeConsumedPartitionGroups();
}
return nonPipelinedConsumedPartitionGroups;
}
@Override
public Iterable<ConsumedPartitionGroup> getAllReleaseBySchedulerConsumedPartitionGroups() {
if (releaseBySchedulerConsumedPartitionGroups == null) {
initializeConsumedPartitionGroups();
}
return releaseBySchedulerConsumedPartitionGroups;
}
@Override
public boolean contains(final ExecutionVertexID vertexId) {
return executionVertices.containsKey(vertexId);
}
}
|
DefaultSchedulingPipelinedRegion
|
java
|
spring-projects__spring-security
|
web/src/main/java/org/springframework/security/web/method/annotation/AuthenticationPrincipalArgumentResolver.java
|
{
"start": 3717,
"end": 8157
}
|
class ____ implements HandlerMethodArgumentResolver {
private final Class<AuthenticationPrincipal> annotationType = AuthenticationPrincipal.class;
private SecurityContextHolderStrategy securityContextHolderStrategy = SecurityContextHolder
.getContextHolderStrategy();
private ExpressionParser parser = new SpelExpressionParser();
private SecurityAnnotationScanner<AuthenticationPrincipal> scanner = SecurityAnnotationScanners
.requireUnique(AuthenticationPrincipal.class);
private boolean useAnnotationTemplate = false;
private @Nullable BeanResolver beanResolver;
@Override
public @Nullable Object resolveArgument(MethodParameter parameter, @Nullable ModelAndViewContainer mavContainer,
NativeWebRequest webRequest, @Nullable WebDataBinderFactory binderFactory) {
Authentication authentication = this.securityContextHolderStrategy.getContext().getAuthentication();
if (authentication == null) {
return null;
}
Object principal = authentication.getPrincipal();
AuthenticationPrincipal annotation = findMethodAnnotation(parameter);
Assert.notNull(annotation, "@AuthenticationPrincipal is required. Call supportsParameter first.");
String expressionToParse = annotation.expression();
if (StringUtils.hasLength(expressionToParse)) {
StandardEvaluationContext context = new StandardEvaluationContext();
context.setRootObject(principal);
context.setVariable("this", principal);
// https://github.com/spring-projects/spring-framework/issues/35371
if (this.beanResolver != null) {
context.setBeanResolver(this.beanResolver);
}
Expression expression = this.parser.parseExpression(expressionToParse);
principal = expression.getValue(context);
}
if (principal != null && !ClassUtils.isAssignable(parameter.getParameterType(), principal.getClass())) {
if (annotation.errorOnInvalidType()) {
throw new ClassCastException(principal + " is not assignable to " + parameter.getParameterType());
}
return null;
}
return principal;
}
@Override
public boolean supportsParameter(MethodParameter parameter) {
return findMethodAnnotation(parameter) != null;
}
/**
* Sets the {@link BeanResolver} to be used on the expressions
* @param beanResolver the {@link BeanResolver} to use
*/
public void setBeanResolver(BeanResolver beanResolver) {
this.beanResolver = beanResolver;
}
/**
* Sets the {@link SecurityContextHolderStrategy} to use. The default action is to use
* the {@link SecurityContextHolderStrategy} stored in {@link SecurityContextHolder}.
*
* @since 5.8
*/
public void setSecurityContextHolderStrategy(SecurityContextHolderStrategy securityContextHolderStrategy) {
Assert.notNull(securityContextHolderStrategy, "securityContextHolderStrategy cannot be null");
this.securityContextHolderStrategy = securityContextHolderStrategy;
}
/**
* Configure AuthenticationPrincipal template resolution
* <p>
* By default, this value is <code>null</code>, which indicates that templates should
* not be resolved.
* @param templateDefaults - whether to resolve AuthenticationPrincipal templates
* parameters
* @since 6.4
*/
public void setTemplateDefaults(AnnotationTemplateExpressionDefaults templateDefaults) {
this.scanner = SecurityAnnotationScanners.requireUnique(AuthenticationPrincipal.class, templateDefaults);
this.useAnnotationTemplate = templateDefaults != null;
}
/**
* Obtains the specified {@link Annotation} on the specified {@link MethodParameter}.
* {@link MethodParameter}
* @param parameter the {@link MethodParameter} to search for an {@link Annotation}
* @return the {@link Annotation} that was found or null.
*/
@SuppressWarnings("unchecked")
private @Nullable AuthenticationPrincipal findMethodAnnotation(MethodParameter parameter) {
if (this.useAnnotationTemplate) {
return this.scanner.scan(parameter.getParameter());
}
AuthenticationPrincipal annotation = parameter.getParameterAnnotation(this.annotationType);
if (annotation != null) {
return annotation;
}
Annotation[] annotationsToSearch = parameter.getParameterAnnotations();
for (Annotation toSearch : annotationsToSearch) {
annotation = AnnotationUtils.findAnnotation(toSearch.annotationType(), this.annotationType);
if (annotation != null) {
return MergedAnnotations.from(toSearch).get(this.annotationType).synthesize();
}
}
return null;
}
}
|
AuthenticationPrincipalArgumentResolver
|
java
|
google__guava
|
guava-tests/test/com/google/common/cache/CacheBuilderGwtTest.java
|
{
"start": 1582,
"end": 15140
}
|
class ____ {
private FakeTicker fakeTicker;
@Before
public void setUp() {
fakeTicker = new FakeTicker();
}
@Test
public void loader() throws ExecutionException {
Cache<Integer, Integer> cache = CacheBuilder.newBuilder().build();
Callable<Integer> loader =
new Callable<Integer>() {
private int i = 0;
@Override
public Integer call() throws Exception {
return ++i;
}
};
cache.put(0, 10);
assertThat(cache.get(0, loader)).isEqualTo(10);
assertThat(cache.get(20, loader)).isEqualTo(1);
assertThat(cache.get(34, loader)).isEqualTo(2);
cache.invalidate(0);
assertThat(cache.get(0, loader)).isEqualTo(3);
cache.put(0, 10);
cache.invalidateAll();
assertThat(cache.get(0, loader)).isEqualTo(4);
}
@Test
public void sizeConstraint() {
Cache<Integer, Integer> cache = CacheBuilder.newBuilder().maximumSize(4).build();
cache.put(1, 10);
cache.put(2, 20);
cache.put(3, 30);
cache.put(4, 40);
cache.put(5, 50);
assertThat(cache.getIfPresent(10)).isNull();
// Order required to remove dependence on access order / write order constraint.
assertThat(cache.getIfPresent(2)).isEqualTo(20);
assertThat(cache.getIfPresent(3)).isEqualTo(30);
assertThat(cache.getIfPresent(4)).isEqualTo(40);
assertThat(cache.getIfPresent(5)).isEqualTo(50);
cache.put(1, 10);
assertThat(cache.getIfPresent(1)).isEqualTo(10);
assertThat(cache.getIfPresent(3)).isEqualTo(30);
assertThat(cache.getIfPresent(4)).isEqualTo(40);
assertThat(cache.getIfPresent(5)).isEqualTo(50);
assertThat(cache.getIfPresent(2)).isNull();
}
@SuppressWarnings({"deprecation", "LoadingCacheApply"})
@Test
public void loadingCache() throws ExecutionException {
CacheLoader<Integer, Integer> loader =
new CacheLoader<Integer, Integer>() {
int i = 0;
@Override
public Integer load(Integer key) throws Exception {
return i++;
}
};
LoadingCache<Integer, Integer> cache = CacheBuilder.newBuilder().build(loader);
cache.put(10, 20);
Map<Integer, Integer> map = cache.getAll(ImmutableList.of(10, 20, 30, 54, 443, 1));
assertThat(map).containsEntry(10, 20);
assertThat(map).containsEntry(20, 0);
assertThat(map).containsEntry(30, 1);
assertThat(map).containsEntry(54, 2);
assertThat(map).containsEntry(443, 3);
assertThat(map).containsEntry(1, 4);
assertThat(cache.get(6)).isEqualTo(5);
assertThat(cache.apply(7)).isEqualTo(6);
}
@Test
public void expireAfterAccess() {
Cache<Integer, Integer> cache =
CacheBuilder.newBuilder().expireAfterAccess(1000, MILLISECONDS).ticker(fakeTicker).build();
cache.put(0, 10);
cache.put(2, 30);
fakeTicker.advance(999, MILLISECONDS);
assertThat(cache.getIfPresent(2)).isEqualTo(30);
fakeTicker.advance(1, MILLISECONDS);
assertThat(cache.getIfPresent(2)).isEqualTo(30);
fakeTicker.advance(1000, MILLISECONDS);
assertThat(cache.getIfPresent(0)).isNull();
}
@Test
public void expireAfterWrite() {
Cache<Integer, Integer> cache =
CacheBuilder.newBuilder().expireAfterWrite(1000, MILLISECONDS).ticker(fakeTicker).build();
cache.put(10, 100);
cache.put(20, 200);
cache.put(4, 2);
fakeTicker.advance(999, MILLISECONDS);
assertThat(cache.getIfPresent(10)).isEqualTo(100);
assertThat(cache.getIfPresent(20)).isEqualTo(200);
assertThat(cache.getIfPresent(4)).isEqualTo(2);
fakeTicker.advance(2, MILLISECONDS);
assertThat(cache.getIfPresent(10)).isNull();
assertThat(cache.getIfPresent(20)).isNull();
assertThat(cache.getIfPresent(4)).isNull();
cache.put(10, 20);
assertThat(cache.getIfPresent(10)).isEqualTo(20);
fakeTicker.advance(1000, MILLISECONDS);
assertThat(cache.getIfPresent(10)).isNull();
}
@Test
public void expireAfterWriteAndAccess() {
Cache<Integer, Integer> cache =
CacheBuilder.newBuilder()
.expireAfterWrite(1000, MILLISECONDS)
.expireAfterAccess(500, MILLISECONDS)
.ticker(fakeTicker)
.build();
cache.put(10, 100);
cache.put(20, 200);
cache.put(4, 2);
fakeTicker.advance(499, MILLISECONDS);
assertThat(cache.getIfPresent(10)).isEqualTo(100);
assertThat(cache.getIfPresent(20)).isEqualTo(200);
fakeTicker.advance(2, MILLISECONDS);
assertThat(cache.getIfPresent(10)).isEqualTo(100);
assertThat(cache.getIfPresent(20)).isEqualTo(200);
assertThat(cache.getIfPresent(4)).isNull();
fakeTicker.advance(499, MILLISECONDS);
assertThat(cache.getIfPresent(10)).isNull();
assertThat(cache.getIfPresent(20)).isNull();
cache.put(10, 20);
assertThat(cache.getIfPresent(10)).isEqualTo(20);
fakeTicker.advance(500, MILLISECONDS);
assertThat(cache.getIfPresent(10)).isNull();
}
@SuppressWarnings("ContainsEntryAfterGetInteger") // we are testing our implementation of Map.get
@Test
public void mapMethods() {
Cache<Integer, Integer> cache = CacheBuilder.newBuilder().build();
ConcurrentMap<Integer, Integer> asMap = cache.asMap();
cache.put(10, 100);
cache.put(2, 52);
asMap.replace(2, 79);
asMap.replace(3, 60);
assertThat(cache.getIfPresent(3)).isNull();
assertThat(asMap.get(3)).isNull();
assertThat(cache.getIfPresent(2)).isEqualTo(79);
assertThat(asMap.get(2)).isEqualTo(79);
asMap.replace(10, 100, 50);
asMap.replace(2, 52, 99);
assertThat(cache.getIfPresent(10)).isEqualTo(50);
assertThat(asMap.get(10)).isEqualTo(50);
assertThat(cache.getIfPresent(2)).isEqualTo(79);
assertThat(asMap.get(2)).isEqualTo(79);
asMap.remove(10, 100);
asMap.remove(2, 79);
assertThat(cache.getIfPresent(10)).isEqualTo(50);
assertThat(asMap.get(10)).isEqualTo(50);
assertThat(cache.getIfPresent(2)).isNull();
assertThat(asMap.get(2)).isNull();
asMap.putIfAbsent(2, 20);
asMap.putIfAbsent(10, 20);
assertThat(cache.getIfPresent(2)).isEqualTo(20);
assertThat(asMap.get(2)).isEqualTo(20);
assertThat(cache.getIfPresent(10)).isEqualTo(50);
assertThat(asMap.get(10)).isEqualTo(50);
}
@Test
public void removalListener() {
int[] stats = new int[4];
RemovalListener<Integer, Integer> countingListener =
new RemovalListener<Integer, Integer>() {
@Override
public void onRemoval(RemovalNotification<Integer, Integer> notification) {
switch (notification.getCause()) {
case EXPIRED:
stats[0]++;
break;
case EXPLICIT:
stats[1]++;
break;
case REPLACED:
stats[2]++;
break;
case SIZE:
stats[3]++;
break;
default:
throw new IllegalStateException("No collected exceptions in GWT CacheBuilder.");
}
}
};
Cache<Integer, Integer> cache =
CacheBuilder.newBuilder()
.expireAfterWrite(1000, MILLISECONDS)
.removalListener(countingListener)
.ticker(fakeTicker)
.maximumSize(2)
.build();
// Add more than two elements to increment size removals.
cache.put(3, 20);
cache.put(6, 2);
cache.put(98, 45);
cache.put(56, 76);
cache.put(23, 84);
// Replace the two present elements.
cache.put(23, 20);
cache.put(56, 49);
cache.put(23, 2);
cache.put(56, 4);
// Expire the two present elements.
fakeTicker.advance(1001, MILLISECONDS);
Integer unused1 = cache.getIfPresent(23);
Integer unused2 = cache.getIfPresent(56);
// Add two elements and invalidate them.
cache.put(1, 4);
cache.put(2, 8);
cache.invalidateAll();
assertThat(stats[0]).isEqualTo(2);
assertThat(stats[1]).isEqualTo(2);
assertThat(stats[2]).isEqualTo(4);
assertThat(stats[3]).isEqualTo(3);
}
@Test
public void putAll() {
Cache<Integer, Integer> cache = CacheBuilder.newBuilder().build();
cache.putAll(ImmutableMap.of(10, 20, 30, 50, 60, 90));
assertThat(cache.getIfPresent(10)).isEqualTo(20);
assertThat(cache.getIfPresent(30)).isEqualTo(50);
assertThat(cache.getIfPresent(60)).isEqualTo(90);
cache.asMap().putAll(ImmutableMap.of(10, 50, 30, 20, 60, 70, 5, 5));
assertThat(cache.getIfPresent(10)).isEqualTo(50);
assertThat(cache.getIfPresent(30)).isEqualTo(20);
assertThat(cache.getIfPresent(60)).isEqualTo(70);
assertThat(cache.getIfPresent(5)).isEqualTo(5);
}
@Test
public void invalidate() {
Cache<Integer, Integer> cache = CacheBuilder.newBuilder().build();
cache.put(654, 2675);
cache.put(2456, 56);
cache.put(2, 15);
cache.invalidate(654);
assertThat(cache.asMap().containsKey(654)).isFalse();
assertThat(cache.asMap().containsKey(2456)).isTrue();
assertThat(cache.asMap().containsKey(2)).isTrue();
}
@Test
public void invalidateAll() {
Cache<Integer, Integer> cache = CacheBuilder.newBuilder().build();
cache.put(654, 2675);
cache.put(2456, 56);
cache.put(2, 15);
cache.invalidateAll();
assertThat(cache.asMap().containsKey(654)).isFalse();
assertThat(cache.asMap().containsKey(2456)).isFalse();
assertThat(cache.asMap().containsKey(2)).isFalse();
cache.put(654, 2675);
cache.put(2456, 56);
cache.put(2, 15);
cache.put(1, 3);
cache.invalidateAll(ImmutableSet.of(1, 2));
assertThat(cache.asMap().containsKey(1)).isFalse();
assertThat(cache.asMap().containsKey(2)).isFalse();
assertThat(cache.asMap().containsKey(654)).isTrue();
assertThat(cache.asMap().containsKey(2456)).isTrue();
}
@Test
public void asMap_containsValue() {
Cache<Integer, Integer> cache =
CacheBuilder.newBuilder().expireAfterWrite(20000, MILLISECONDS).ticker(fakeTicker).build();
cache.put(654, 2675);
fakeTicker.advance(10000, MILLISECONDS);
cache.put(2456, 56);
cache.put(2, 15);
fakeTicker.advance(10001, MILLISECONDS);
assertThat(cache.asMap().containsValue(15)).isTrue();
assertThat(cache.asMap().containsValue(56)).isTrue();
assertThat(cache.asMap().containsValue(2675)).isFalse();
}
// we are testing our implementation of Map.containsKey
@SuppressWarnings("ContainsEntryAfterGetInteger")
@Test
public void asMap_containsKey() {
Cache<Integer, Integer> cache =
CacheBuilder.newBuilder().expireAfterWrite(20000, MILLISECONDS).ticker(fakeTicker).build();
cache.put(654, 2675);
fakeTicker.advance(10000, MILLISECONDS);
cache.put(2456, 56);
cache.put(2, 15);
fakeTicker.advance(10001, MILLISECONDS);
assertThat(cache.asMap().containsKey(2)).isTrue();
assertThat(cache.asMap().containsKey(2456)).isTrue();
assertThat(cache.asMap().containsKey(654)).isFalse();
}
// we are testing our implementation of Map.values().contains
@SuppressWarnings("ValuesContainsValue")
@Test
public void asMapValues_contains() {
Cache<Integer, Integer> cache =
CacheBuilder.newBuilder().expireAfterWrite(1000, MILLISECONDS).ticker(fakeTicker).build();
cache.put(10, 20);
fakeTicker.advance(500, MILLISECONDS);
cache.put(20, 22);
cache.put(5, 10);
fakeTicker.advance(501, MILLISECONDS);
assertThat(cache.asMap().values().contains(22)).isTrue();
assertThat(cache.asMap().values().contains(10)).isTrue();
assertThat(cache.asMap().values().contains(20)).isFalse();
}
@Test
public void asMapKeySet() {
Cache<Integer, Integer> cache =
CacheBuilder.newBuilder().expireAfterWrite(1000, MILLISECONDS).ticker(fakeTicker).build();
cache.put(10, 20);
fakeTicker.advance(500, MILLISECONDS);
cache.put(20, 22);
cache.put(5, 10);
fakeTicker.advance(501, MILLISECONDS);
Set<Integer> foundKeys = new HashSet<>(cache.asMap().keySet());
assertThat(foundKeys).containsExactly(20, 5);
}
@Test
public void asMapKeySet_contains() {
Cache<Integer, Integer> cache =
CacheBuilder.newBuilder().expireAfterWrite(1000, MILLISECONDS).ticker(fakeTicker).build();
cache.put(10, 20);
fakeTicker.advance(500, MILLISECONDS);
cache.put(20, 22);
cache.put(5, 10);
fakeTicker.advance(501, MILLISECONDS);
assertThat(cache.asMap().keySet().contains(20)).isTrue();
assertThat(cache.asMap().keySet().contains(5)).isTrue();
assertThat(cache.asMap().keySet().contains(10)).isFalse();
}
@Test
public void asMapEntrySet() {
Cache<Integer, Integer> cache =
CacheBuilder.newBuilder().expireAfterWrite(1000, MILLISECONDS).ticker(fakeTicker).build();
cache.put(10, 20);
fakeTicker.advance(500, MILLISECONDS);
cache.put(20, 22);
cache.put(5, 10);
fakeTicker.advance(501, MILLISECONDS);
int sum = 0;
for (Entry<Integer, Integer> current : cache.asMap().entrySet()) {
sum += current.getKey() + current.getValue();
}
assertThat(sum).isEqualTo(57);
}
@Test
public void asMapValues_iteratorRemove() {
Cache<Integer, Integer> cache =
CacheBuilder.newBuilder().expireAfterWrite(1000, MILLISECONDS).ticker(fakeTicker).build();
cache.put(10, 20);
Iterator<Integer> iterator = cache.asMap().values().iterator();
iterator.next();
iterator.remove();
assertThat(cache.size()).isEqualTo(0);
}
}
|
CacheBuilderGwtTest
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/cglib/core/ClassGenerator.java
|
{
"start": 712,
"end": 798
}
|
interface ____ {
void generateClass(ClassVisitor v) throws Exception;
}
|
ClassGenerator
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/injection/guice/multibindings/MapBinder.java
|
{
"start": 3798,
"end": 7998
}
|
class ____<K, V> {
private MapBinder() {}
/**
* Returns a new mapbinder that collects entries of {@code keyType}/{@code valueType} in a
* {@link Map} that is itself bound with no binding annotation.
*/
public static <K, V> MapBinder<K, V> newMapBinder(Binder binder, Class<K> keyType, Class<V> valueType) {
TypeLiteral<K> keyType1 = TypeLiteral.get(keyType);
TypeLiteral<V> valueType1 = TypeLiteral.get(valueType);
binder = binder.skipSources(MapBinder.class, RealMapBinder.class);
return newMapBinder(
binder,
valueType1,
Key.get(mapOf(keyType1, valueType1)),
Key.get(mapOfProviderOf(keyType1, valueType1)),
Multibinder.newSetBinder(binder, entryOfProviderOf(keyType1, valueType1))
);
}
@SuppressWarnings("unchecked") // a map of <K, V> is safely a Map<K, V>
private static <K, V> TypeLiteral<Map<K, V>> mapOf(TypeLiteral<K> keyType, TypeLiteral<V> valueType) {
return (TypeLiteral<Map<K, V>>) TypeLiteral.get(Types.mapOf(keyType.getType(), valueType.getType()));
}
@SuppressWarnings("unchecked") // a provider map <K, V> is safely a Map<K, Provider<V>>
private static <K, V> TypeLiteral<Map<K, Provider<V>>> mapOfProviderOf(TypeLiteral<K> keyType, TypeLiteral<V> valueType) {
return (TypeLiteral<Map<K, Provider<V>>>) TypeLiteral.get(
Types.mapOf(keyType.getType(), newParameterizedType(Provider.class, valueType.getType()))
);
}
@SuppressWarnings("unchecked") // a provider entry <K, V> is safely a Map.Entry<K, Provider<V>>
private static <K, V> TypeLiteral<Map.Entry<K, Provider<V>>> entryOfProviderOf(TypeLiteral<K> keyType, TypeLiteral<V> valueType) {
return (TypeLiteral<Entry<K, Provider<V>>>) TypeLiteral.get(
newParameterizedTypeWithOwner(Map.class, Entry.class, keyType.getType(), Types.providerOf(valueType.getType()))
);
}
private static <K, V> MapBinder<K, V> newMapBinder(
Binder binder,
TypeLiteral<V> valueType,
Key<Map<K, V>> mapKey,
Key<Map<K, Provider<V>>> providerMapKey,
Multibinder<Entry<K, Provider<V>>> entrySetBinder
) {
RealMapBinder<K, V> mapBinder = new RealMapBinder<>(binder, valueType, mapKey, providerMapKey, entrySetBinder);
binder.install(mapBinder);
return mapBinder;
}
/**
* Returns a binding builder used to add a new entry in the map. Each
* key must be distinct (and non-null). Bound providers will be evaluated each
* time the map is injected.
* <p>
* It is an error to call this method without also calling one of the
* {@code to} methods on the returned binding builder.
* <p>
* Scoping elements independently is supported. Use the {@code in} method
* to specify a binding scope.
*/
public abstract LinkedBindingBuilder<V> addBinding(K key);
/**
* The actual mapbinder plays several roles:
* <p>
* As a MapBinder, it acts as a factory for LinkedBindingBuilders for
* each of the map's values. It delegates to a {@link Multibinder} of
* entries (keys to value providers).
* <p>
* As a Module, it installs the binding to the map itself, as well as to
* a corresponding map whose values are providers. It uses the entry set
* multibinder to construct the map and the provider map.
* <p>
* As a module, this implements equals() and hashcode() in order to trick
* Guice into executing its configure() method only once. That makes it so
* that multiple mapbinders can be created for the same target map, but
* only one is bound. Since the list of bindings is retrieved from the
* injector itself (and not the mapbinder), each mapbinder has access to
* all contributions from all equivalent mapbinders.
* <p>
* Rather than binding a single Map.Entry<K, V>, the map binder
* binds keys and values independently. This allows the values to be properly
* scoped.
* <p>
* We use a subclass to hide 'implements Module' from the public API.
*/
public static final
|
MapBinder
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/AddToClusterNodeLabelsResponse.java
|
{
"start": 1082,
"end": 1258
}
|
class ____ {
public static AddToClusterNodeLabelsResponse newInstance() {
return Records.newRecord(AddToClusterNodeLabelsResponse.class);
}
}
|
AddToClusterNodeLabelsResponse
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobStorageDeletionTaskIT.java
|
{
"start": 2805,
"end": 11448
}
|
class ____ extends BaseMlIntegTestCase {
private static final long bucketSpan = AnalysisConfig.Builder.DEFAULT_BUCKET_SPAN.getMillis();
private static final String UNRELATED_INDEX = "unrelated-data";
private JobResultsProvider jobResultsProvider;
private JobResultsPersister jobResultsPersister;
@Before
public void createComponents() {
Settings settings = nodeSettings(0, Settings.EMPTY);
ThreadPool tp = mockThreadPool();
ClusterSettings clusterSettings = new ClusterSettings(
settings,
new HashSet<>(
Arrays.asList(
InferenceProcessor.MAX_INFERENCE_PROCESSORS,
MasterService.MASTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING,
ResultsPersisterService.PERSIST_RESULTS_MAX_RETRIES,
OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING,
ClusterService.USER_DEFINED_METADATA,
ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING,
ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING,
ClusterApplierService.CLUSTER_APPLIER_THREAD_WATCHDOG_INTERVAL,
ClusterApplierService.CLUSTER_APPLIER_THREAD_WATCHDOG_QUIET_TIME
)
)
);
ClusterService clusterService = new ClusterService(settings, clusterSettings, tp, null);
OriginSettingClient originSettingClient = new OriginSettingClient(client(), ClientHelper.ML_ORIGIN);
ResultsPersisterService resultsPersisterService = new ResultsPersisterService(tp, originSettingClient, clusterService, settings);
jobResultsProvider = new JobResultsProvider(client(), settings, TestIndexNameExpressionResolver.newInstance());
jobResultsPersister = new JobResultsPersister(originSettingClient, resultsPersisterService);
}
public void testUnrelatedIndexNotTouched() throws Exception {
internalCluster().ensureAtLeastNumDataNodes(1);
ensureStableCluster(1);
client().admin().indices().prepareCreate(UNRELATED_INDEX).get();
enableIndexBlock(UNRELATED_INDEX, IndexMetadata.SETTING_READ_ONLY);
Job.Builder job = createJob("delete-aliases-test-job", ByteSizeValue.ofMb(2));
PutJobAction.Request putJobRequest = new PutJobAction.Request(job);
client().execute(PutJobAction.INSTANCE, putJobRequest).actionGet();
OpenJobAction.Request openJobRequest = new OpenJobAction.Request(job.getId());
client().execute(OpenJobAction.INSTANCE, openJobRequest).actionGet();
awaitJobOpenedAndAssigned(job.getId(), null);
DeleteJobAction.Request deleteJobRequest = new DeleteJobAction.Request(job.getId());
deleteJobRequest.setForce(true);
client().execute(DeleteJobAction.INSTANCE, deleteJobRequest).actionGet();
// If the deletion of aliases touches the unrelated index with the block
// then the line above will throw a ClusterBlockException
disableIndexBlock(UNRELATED_INDEX, IndexMetadata.SETTING_READ_ONLY);
}
public void testDeleteDedicatedJobWithDataInShared() throws Exception {
internalCluster().ensureAtLeastNumDataNodes(1);
ensureStableCluster(1);
String jobIdDedicated = "delete-test-job-dedicated";
Job.Builder job = createJob(jobIdDedicated, ByteSizeValue.ofMb(2)).setResultsIndexName(jobIdDedicated + "-000001");
client().execute(PutJobAction.INSTANCE, new PutJobAction.Request(job)).actionGet();
client().execute(OpenJobAction.INSTANCE, new OpenJobAction.Request(job.getId())).actionGet();
String dedicatedIndex = job.build().getInitialResultsIndexName();
awaitJobOpenedAndAssigned(job.getId(), null);
createBuckets(jobIdDedicated, 1, 10);
String jobIdShared = "delete-test-job-shared";
job = createJob(jobIdShared, ByteSizeValue.ofMb(2));
client().execute(PutJobAction.INSTANCE, new PutJobAction.Request(job)).actionGet();
client().execute(OpenJobAction.INSTANCE, new OpenJobAction.Request(job.getId())).actionGet();
awaitJobOpenedAndAssigned(job.getId(), null);
createBuckets(jobIdShared, 1, 10);
// Manually switching over alias info
IndicesAliasesRequest aliasesRequest = new IndicesAliasesRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT).addAliasAction(
IndicesAliasesRequest.AliasActions.add()
.alias(AnomalyDetectorsIndex.jobResultsAliasedName(jobIdDedicated))
.isHidden(true)
.index(AnomalyDetectorsIndex.jobResultsIndexPrefix() + "shared-000001")
.writeIndex(false)
.filter(QueryBuilders.boolQuery().filter(QueryBuilders.termQuery(Job.ID.getPreferredName(), jobIdDedicated)))
)
.addAliasAction(
IndicesAliasesRequest.AliasActions.add()
.alias(AnomalyDetectorsIndex.resultsWriteAlias(jobIdDedicated))
.index(AnomalyDetectorsIndex.jobResultsIndexPrefix() + "shared-000001")
.isHidden(true)
.writeIndex(true)
)
.addAliasAction(
IndicesAliasesRequest.AliasActions.remove()
.alias(AnomalyDetectorsIndex.resultsWriteAlias(jobIdDedicated))
.index(dedicatedIndex)
);
client().admin().indices().aliases(aliasesRequest).actionGet();
createBuckets(jobIdDedicated, 11, 10);
client().admin().indices().prepareRefresh(AnomalyDetectorsIndex.jobResultsIndexPrefix() + "*").get();
AtomicReference<QueryPage<Bucket>> bucketHandler = new AtomicReference<>();
AtomicReference<Exception> failureHandler = new AtomicReference<>();
blockingCall(
listener -> jobResultsProvider.buckets(
jobIdDedicated,
new BucketsQueryBuilder().from(0).size(22),
listener::onResponse,
listener::onFailure,
client()
),
bucketHandler,
failureHandler
);
assertThat(failureHandler.get(), is(nullValue()));
assertThat(bucketHandler.get().count(), equalTo(22L));
DeleteJobAction.Request deleteJobRequest = new DeleteJobAction.Request(jobIdDedicated);
deleteJobRequest.setForce(true);
client().execute(DeleteJobAction.INSTANCE, deleteJobRequest).get();
client().admin().indices().prepareRefresh(AnomalyDetectorsIndex.jobResultsIndexPrefix() + "*").get();
// Make sure our shared index job is OK
bucketHandler = new AtomicReference<>();
failureHandler = new AtomicReference<>();
blockingCall(
listener -> jobResultsProvider.buckets(
jobIdShared,
new BucketsQueryBuilder().from(0).size(21),
listener::onResponse,
listener::onFailure,
client()
),
bucketHandler,
failureHandler
);
assertThat(failureHandler.get(), is(nullValue()));
assertThat(bucketHandler.get().count(), equalTo(11L));
// Make sure dedicated index is gone
assertThat(
indicesAdmin().prepareGetIndex(TEST_REQUEST_TIMEOUT)
.setIndices(dedicatedIndex)
.setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN)
.get()
.indices().length,
equalTo(0)
);
// Make sure all results referencing the dedicated job are gone
assertHitCount(
prepareSearch().setIndices(AnomalyDetectorsIndex.jobResultsIndexPrefix() + "*")
.setIndicesOptions(IndicesOptions.lenientExpandOpenHidden())
.setTrackTotalHits(true)
.setSize(0)
.setSource(
SearchSourceBuilder.searchSource()
.query(QueryBuilders.boolQuery().filter(QueryBuilders.termQuery(Job.ID.getPreferredName(), jobIdDedicated)))
),
0
);
}
private void createBuckets(String jobId, int from, int count) {
JobResultsPersister.Builder builder = jobResultsPersister.bulkPersisterBuilder(jobId);
for (int i = from; i <= count + from; ++i) {
Bucket bucket = new Bucket(jobId, new Date(bucketSpan * i), bucketSpan);
builder.persistBucket(bucket);
}
builder.executeRequest();
}
}
|
JobStorageDeletionTaskIT
|
java
|
google__guice
|
core/test/com/googlecode/guice/JakartaTest.java
|
{
"start": 13793,
"end": 13891
}
|
class ____ {
static int nextInstanceId = 0;
int instanceId = nextInstanceId++;
}
static
|
J
|
java
|
hibernate__hibernate-orm
|
tooling/metamodel-generator/src/test/java/org/hibernate/processor/test/mixedmode/Insurance.java
|
{
"start": 239,
"end": 569
}
|
class ____ {
String number;
String policyNumber;
public String getPolicyNumber() {
return policyNumber;
}
public void setPolicyNumber(String policyNumber) {
this.policyNumber = policyNumber;
}
public String getNumber() {
return number;
}
public void setNumber(String number) {
this.number = number;
}
}
|
Insurance
|
java
|
google__error-prone
|
core/src/main/java/com/google/errorprone/bugpatterns/time/DateChecker.java
|
{
"start": 2732,
"end": 8172
}
|
class ____ extends BugChecker
implements MethodInvocationTreeMatcher, NewClassTreeMatcher {
private static final String DATE = "java.util.Date";
private static final Matcher<ExpressionTree> CONSTRUCTORS =
anyOf(
constructor().forClass(DATE).withParameters("int", "int", "int"),
constructor().forClass(DATE).withParameters("int", "int", "int", "int", "int"),
constructor().forClass(DATE).withParameters("int", "int", "int", "int", "int", "int"));
private static final Matcher<ExpressionTree> SET_YEAR =
instanceMethod().onExactClass(DATE).named("setYear");
private static final Matcher<ExpressionTree> SET_MONTH =
instanceMethod().onExactClass(DATE).named("setMonth");
private static final Matcher<ExpressionTree> SET_DAY =
instanceMethod().onExactClass(DATE).named("setDate");
private static final Matcher<ExpressionTree> SET_HOUR =
instanceMethod().onExactClass(DATE).named("setHours");
private static final Matcher<ExpressionTree> SET_MIN =
instanceMethod().onExactClass(DATE).named("setMinutes");
private static final Matcher<ExpressionTree> SET_SEC =
instanceMethod().onExactClass(DATE).named("setSeconds");
// permits years [1901, 2200] which seems ~reasonable
private static final Range<Integer> YEAR_RANGE = Range.closed(1, 300);
private static final Range<Integer> MONTH_RANGE = Range.closed(0, 11);
private static final Range<Integer> DAY_RANGE = Range.closed(1, 31);
private static final Range<Integer> HOUR_RANGE = Range.closed(0, 23);
private static final Range<Integer> SEC_MIN_RANGE = Range.closed(0, 59);
@Override
public Description matchMethodInvocation(MethodInvocationTree tree, VisitorState state) {
List<String> errors = new ArrayList<>();
if (tree.getArguments().size() == 1) {
ExpressionTree arg0 = tree.getArguments().get(0);
if (SET_YEAR.matches(tree, state)) {
checkYear(arg0, errors);
} else if (SET_MONTH.matches(tree, state)) {
checkMonth(arg0, errors);
} else if (SET_DAY.matches(tree, state)) {
checkDay(arg0, errors);
} else if (SET_HOUR.matches(tree, state)) {
checkHours(arg0, errors);
} else if (SET_MIN.matches(tree, state)) {
checkMinutes(arg0, errors);
} else if (SET_SEC.matches(tree, state)) {
checkSeconds(arg0, errors);
}
}
return buildDescription(tree, errors);
}
@Override
public Description matchNewClass(NewClassTree tree, VisitorState state) {
List<String> errors = new ArrayList<>();
if (CONSTRUCTORS.matches(tree, state)) {
List<? extends ExpressionTree> args = tree.getArguments();
int numArgs = args.size();
verify(
numArgs >= 3 && numArgs <= 6,
"Expected the constructor to have at least 3 and at most 6 arguments, but it had %s",
numArgs);
checkYear(args.get(0), errors);
checkMonth(args.get(1), errors);
checkDay(args.get(2), errors);
if (numArgs > 4) {
checkHours(args.get(3), errors);
checkMinutes(args.get(4), errors);
}
if (numArgs > 5) {
checkSeconds(args.get(5), errors);
}
}
return buildDescription(tree, errors);
}
private Description buildDescription(ExpressionTree tree, List<String> errors) {
return errors.isEmpty()
? Description.NO_MATCH
: buildDescription(tree)
.setMessage(
"This Date usage looks suspect for the following reason(s): "
+ Joiner.on(" ").join(errors))
.build();
}
private static void checkYear(ExpressionTree tree, List<String> errors) {
checkBounds(tree, "1900-based year", YEAR_RANGE, errors);
}
private static void checkMonth(ExpressionTree tree, List<String> errors) {
checkBounds(tree, "0-based month", MONTH_RANGE, errors);
if (tree instanceof LiteralTree literalTree) {
int monthValue = (int) literalTree.getValue();
try {
errors.add(
String.format(
"Use Calendar.%s instead of %s to represent the month.",
Month.of(monthValue + 1), monthValue));
} catch (DateTimeException badMonth) {
// this is an out of bounds month, and thus already caught by the checkBounds() call above!
}
}
}
private static void checkDay(ExpressionTree tree, List<String> errors) {
// TODO(kak): we should also consider checking if the given day is valid for the given
// month/year. E.g., Feb 30th is never valid, Feb 29th is sometimes valid, and Feb 28th is
// always valid.
checkBounds(tree, "day", DAY_RANGE, errors);
}
private static void checkHours(ExpressionTree tree, List<String> errors) {
checkBounds(tree, "hours", HOUR_RANGE, errors);
}
private static void checkMinutes(ExpressionTree tree, List<String> errors) {
checkBounds(tree, "minutes", SEC_MIN_RANGE, errors);
}
private static void checkSeconds(ExpressionTree tree, List<String> errors) {
checkBounds(tree, "seconds", SEC_MIN_RANGE, errors);
}
private static void checkBounds(
ExpressionTree tree, String type, Range<Integer> range, List<String> errors) {
Integer value = ASTHelpers.constValue(tree, Integer.class);
if (value != null && !range.contains(value)) {
errors.add(String.format("The %s value (%s) is out of bounds %s.", type, value, range));
}
}
}
|
DateChecker
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/OptionalMapToOptionalTest.java
|
{
"start": 880,
"end": 1224
}
|
class ____ {
private final CompilationTestHelper helper =
CompilationTestHelper.newInstance(OptionalMapToOptional.class, getClass());
@Test
public void positiveWithJavaOptional() {
helper
.addSourceLines(
"Test.java",
"""
import java.util.Optional;
|
OptionalMapToOptionalTest
|
java
|
spring-projects__spring-security
|
web/src/test/java/org/springframework/security/web/server/csrf/ServerCsrfTokenRequestAttributeHandlerTests.java
|
{
"start": 1321,
"end": 5133
}
|
class ____ {
private ServerCsrfTokenRequestAttributeHandler handler;
private MockServerWebExchange exchange;
private CsrfToken token;
@BeforeEach
public void setUp() {
this.handler = new ServerCsrfTokenRequestAttributeHandler();
this.exchange = MockServerWebExchange.builder(MockServerHttpRequest.get("/")).build();
this.token = new DefaultCsrfToken("headerName", "paramName", "csrfTokenValue");
}
@Test
public void handleWhenExchangeIsNullThenThrowsIllegalArgumentException() {
// @formatter:off
assertThatIllegalArgumentException()
.isThrownBy(() -> this.handler.handle(null, Mono.just(this.token)))
.withMessage("exchange cannot be null");
// @formatter:on
}
@Test
public void handleWhenCsrfTokenIsNullThenThrowsIllegalArgumentException() {
// @formatter:off
assertThatIllegalArgumentException()
.isThrownBy(() -> this.handler.handle(this.exchange, null))
.withMessage("csrfToken cannot be null");
// @formatter:on
}
@Test
public void handleWhenValidParametersThenExchangeAttributeSet() {
Mono<CsrfToken> csrfToken = Mono.just(this.token);
this.handler.handle(this.exchange, csrfToken);
Mono<CsrfToken> csrfTokenAttribute = this.exchange.getAttribute(CsrfToken.class.getName());
assertThat(csrfTokenAttribute).isNotNull();
assertThat(csrfTokenAttribute).isEqualTo(csrfToken);
}
@Test
public void resolveCsrfTokenValueWhenExchangeIsNullThenThrowsIllegalArgumentException() {
// @formatter:off
assertThatIllegalArgumentException()
.isThrownBy(() -> this.handler.resolveCsrfTokenValue(null, this.token))
.withMessage("exchange cannot be null");
// @formatter:on
}
@Test
public void resolveCsrfTokenValueWhenCsrfTokenIsNullThenThrowsIllegalArgumentException() {
// @formatter:off
assertThatIllegalArgumentException()
.isThrownBy(() -> this.handler.resolveCsrfTokenValue(this.exchange, null))
.withMessage("csrfToken cannot be null");
// @formatter:on
}
@Test
public void resolveCsrfTokenValueWhenTokenNotSetThenReturnsEmptyMono() {
Mono<String> csrfToken = this.handler.resolveCsrfTokenValue(this.exchange, this.token);
StepVerifier.create(csrfToken).verifyComplete();
}
@Test
public void resolveCsrfTokenValueWhenFormDataSetThenReturnsTokenValue() {
this.exchange = MockServerWebExchange
.builder(MockServerHttpRequest.post("/")
.header(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_FORM_URLENCODED_VALUE)
.body(this.token.getParameterName() + "=" + this.token.getToken()))
.build();
Mono<String> csrfToken = this.handler.resolveCsrfTokenValue(this.exchange, this.token);
StepVerifier.create(csrfToken).expectNext(this.token.getToken()).verifyComplete();
}
@Test
public void resolveCsrfTokenValueWhenHeaderSetThenReturnsTokenValue() {
this.exchange = MockServerWebExchange
.builder(MockServerHttpRequest.post("/")
.header(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_FORM_URLENCODED_VALUE)
.header(this.token.getHeaderName(), this.token.getToken()))
.build();
Mono<String> csrfToken = this.handler.resolveCsrfTokenValue(this.exchange, this.token);
StepVerifier.create(csrfToken).expectNext(this.token.getToken()).verifyComplete();
}
	// When both a header ("header") and a form parameter (the real token) are present,
	// the form parameter wins: the resolved value is the body token, not the header value.
	@Test
	public void resolveCsrfTokenValueWhenHeaderAndFormDataSetThenFormDataIsPreferred() {
		this.exchange = MockServerWebExchange
			.builder(MockServerHttpRequest.post("/")
				.header(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_FORM_URLENCODED_VALUE)
				.header(this.token.getHeaderName(), "header")
				.body(this.token.getParameterName() + "=" + this.token.getToken()))
			.build();
		Mono<String> csrfToken = this.handler.resolveCsrfTokenValue(this.exchange, this.token);
		StepVerifier.create(csrfToken).expectNext(this.token.getToken()).verifyComplete();
	}
}
|
ServerCsrfTokenRequestAttributeHandlerTests
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/RequestMappingHandlerAdapterIntegrationTests.java
|
{
"start": 26811,
"end": 27458
}
|
/**
 * Test argument resolver that resolves {@link Principal} parameters annotated
 * with {@code @AuthenticationPrincipal} to a fixed principal named "Custom User".
 */
class CustomPrincipalArgumentResolver implements HandlerMethodArgumentResolver {

	@Override
	public boolean supportsParameter(MethodParameter parameter) {
		// Only Principal (sub)types that carry the @AuthenticationPrincipal annotation.
		return (Principal.class.isAssignableFrom(parameter.getParameterType()) &&
				parameter.hasParameterAnnotation(AuthenticationPrincipal.class));
	}

	@Override
	public @Nullable Object resolveArgument(
			MethodParameter parameter, @Nullable ModelAndViewContainer mavContainer,
			NativeWebRequest webRequest, @Nullable WebDataBinderFactory binderFactory) {

		// Principal is a functional interface (single abstract method getName()).
		return (Principal) () -> "Custom User";
	}
}
@Target(ElementType.PARAMETER)
@Retention(RetentionPolicy.RUNTIME)
@
|
CustomPrincipalArgumentResolver
|
java
|
apache__camel
|
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/SmtpsComponentBuilderFactory.java
|
{
"start": 1384,
"end": 1835
}
|
interface ____ {
/**
* SMTPS (camel-mail)
* Send and receive emails using imap, pop3 and smtp protocols.
*
* Category: mail
* Since: 1.0
* Maven coordinates: org.apache.camel:camel-mail
*
* @return the dsl builder
*/
static SmtpsComponentBuilder smtps() {
return new SmtpsComponentBuilderImpl();
}
/**
* Builder for the SMTPS component.
*/
|
SmtpsComponentBuilderFactory
|
java
|
quarkusio__quarkus
|
extensions/smallrye-jwt/deployment/src/test/java/io/quarkus/jwt/test/JwtParserUnitTest.java
|
{
"start": 409,
"end": 2152
}
|
/**
 * Verifies that JWTs lacking an "iat" (issued-at) claim are accepted by the
 * parser endpoints configured via applicationJwtParser.properties.
 */
class JwtParserUnitTest {

    private static Class<?>[] testClasses = {
            JwtParserEndpoint.class
    };

    @RegisterExtension
    static final QuarkusUnitTest config = new QuarkusUnitTest()
            .withApplicationRoot((jar) -> jar
                    .addClasses(testClasses)
                    .addAsResource("publicKey.pem")
                    .addAsResource("privateKey.pem")
                    .addAsResource("applicationJwtParser.properties", "application.properties"));

    @Test
    public void verifyTokenWithoutIssuedAt() throws Exception {
        RestAssured.given().auth()
                .oauth2(generateTokenWithoutIssuedAt())
                .get("/parser/name")
                .then().assertThat().statusCode(200)
                .body(equalTo("alice"));
    }

    @Test
    public void verifyTokenWithoutIssuedAtWithKey() throws Exception {
        RestAssured.given().auth()
                .oauth2(generateTokenWithoutIssuedAt())
                .get("/parser/name-with-key")
                .then().assertThat().statusCode(200)
                .body(equalTo("alice"));
    }

    /**
     * Builds an RS256-signed JWT whose payload deliberately omits the "iat"
     * claim; expiry is set 5 seconds in the future.
     */
    private String generateTokenWithoutIssuedAt() throws Exception {
        // Fix: no trailing comma after the last claim. The previous payload ended
        // with "...,}", which is not valid JSON (RFC 8259) and may be rejected by
        // strict JSON parsers.
        String payload = "{"
                + "\"sub\":\"alice\","
                + "\"iss\":\"https://server.example.com\","
                + "\"exp\":" + (System.currentTimeMillis() / 1000 + 5)
                + "}";
        JsonWebSignature jws = new JsonWebSignature();
        jws.setPayload(payload);
        jws.setAlgorithmHeaderValue(AlgorithmIdentifiers.RSA_USING_SHA256);
        PrivateKey privateKey = KeyUtils.readPrivateKey("privateKey.pem");
        jws.setKey(privateKey);
        return jws.getCompactSerialization();
    }
}
|
JwtParserUnitTest
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/metrics/TestRouterClientMetrics.java
|
{
"start": 1767,
"end": 2609
}
|
class ____ {
private static final Configuration CONF = new HdfsConfiguration();
private static final String ROUTER_METRICS = "RouterClientActivity";
static {
CONF.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, 100);
CONF.setInt(DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY, 1);
CONF.setLong(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1L);
CONF.setInt(DFSConfigKeys.DFS_NAMENODE_REDUNDANCY_INTERVAL_SECONDS_KEY, 1);
}
private static final int NUM_SUBCLUSTERS = 2;
private static final int NUM_DNS = 3;
/** Federated HDFS cluster. */
private static MiniRouterDFSCluster cluster;
/** The first Router Context for this federated cluster. */
private MiniRouterDFSCluster.RouterContext routerContext;
/** The first Router for this federated cluster. */
private Router router;
/** Filesystem
|
TestRouterClientMetrics
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/TopologyDescription.java
|
{
"start": 1971,
"end": 2387
}
|
interface ____ {
/**
* A connected sub-graph of a {@link Topology}.
* <p>
* Nodes of a {@code Subtopology} are connected
* {@link Topology#addProcessor(String, ProcessorSupplier, String...) directly} or indirectly via
* {@link Topology#connectProcessorAndStateStores(String, String...) state stores}
* (i.e., if multiple processors share the same state).
*/
|
TopologyDescription
|
java
|
quarkusio__quarkus
|
extensions/amazon-lambda/deployment/src/test/java/io/quarkus/amazon/lambda/deployment/RequestHandlerJandexUtilTest.java
|
{
"start": 11691,
"end": 13395
}
|
class
____(ConcreteParent.class.getName(), definition.method().declaringClass().name().toString());
}
@Test
public void testAbstractClassWithConcreteMethod() {
RequestHandlerJandexDefinition definition = RequestHandlerJandexUtil
.discoverHandlerMethod(InheritsFromAbstractWithConcrete.class.getName(), index);
assertNotNull(definition);
assertEquals("handleRequest", definition.method().name());
assertEquals(String.class.getName(), definition.inputOutputTypes().inputType().name().toString());
assertEquals(String.class.getName(), definition.inputOutputTypes().outputType().name().toString());
// Should find the concrete method in the abstract parent
assertEquals(AbstractWithConcrete.class.getName(), definition.method().declaringClass().name().toString());
}
@Test
public void testPurelyAbstractShouldFail() {
assertThrows(IllegalStateException.class, () -> {
RequestHandlerJandexUtil.discoverHandlerMethod(PurelyAbstractHandler.class.getName(), index);
});
}
@Test
public void testNestedInterfaceDefault() {
RequestHandlerJandexDefinition definition = RequestHandlerJandexUtil
.discoverHandlerMethod(NestedInterfaceHandler.class.getName(), index);
assertNotNull(definition);
assertEquals("handleRequest", definition.method().name());
assertEquals(Float.class.getName(), definition.inputOutputTypes().inputType().name().toString());
assertEquals(Byte.class.getName(), definition.inputOutputTypes().outputType().name().toString());
// Should find the default method in the nested
|
assertEquals
|
java
|
spring-projects__spring-framework
|
spring-aop/src/main/java/org/springframework/aop/aspectj/MethodInvocationProceedingJoinPoint.java
|
{
"start": 5386,
"end": 8601
}
|
/**
 * {@link MethodSignature} implementation backed by the current method invocation.
 * All reflective metadata is delegated to {@code methodInvocation.getMethod()};
 * only the discovered parameter names are cached (lazily, via a volatile field).
 */
class MethodSignatureImpl implements MethodSignature {

	// Lazily resolved through the parameterNameDiscoverer; may legitimately remain
	// null when no parameter-name information is available for the method.
	private volatile @Nullable String @Nullable [] parameterNames;

	@Override
	public String getName() {
		return methodInvocation.getMethod().getName();
	}

	@Override
	public int getModifiers() {
		return methodInvocation.getMethod().getModifiers();
	}

	@Override
	public Class<?> getDeclaringType() {
		return methodInvocation.getMethod().getDeclaringClass();
	}

	@Override
	public String getDeclaringTypeName() {
		return methodInvocation.getMethod().getDeclaringClass().getName();
	}

	@Override
	public Class<?> getReturnType() {
		return methodInvocation.getMethod().getReturnType();
	}

	@Override
	public Method getMethod() {
		return methodInvocation.getMethod();
	}

	@Override
	public Class<?>[] getParameterTypes() {
		return methodInvocation.getMethod().getParameterTypes();
	}

	@Override
	@SuppressWarnings("NullAway") // Overridden method does not define nullness
	public @Nullable String @Nullable [] getParameterNames() {
		@Nullable String[] parameterNames = this.parameterNames;
		if (parameterNames == null) {
			// Benign race: concurrent callers may each run discovery once;
			// the result is identical, so the last write simply wins.
			parameterNames = parameterNameDiscoverer.getParameterNames(getMethod());
			this.parameterNames = parameterNames;
		}
		return parameterNames;
	}

	@Override
	public Class<?>[] getExceptionTypes() {
		return methodInvocation.getMethod().getExceptionTypes();
	}

	@Override
	public String toShortString() {
		return toString(false, false, false, false);
	}

	@Override
	public String toLongString() {
		return toString(true, true, true, true);
	}

	@Override
	public String toString() {
		return toString(false, true, false, true);
	}

	/**
	 * Render the signature (e.g. {@code com.example.Foo.bar(..)}) with the given
	 * level of detail for modifiers, return/argument types, and type-name length.
	 */
	private String toString(boolean includeModifier, boolean includeReturnTypeAndArgs,
			boolean useLongReturnAndArgumentTypeName, boolean useLongTypeName) {

		StringBuilder sb = new StringBuilder();
		if (includeModifier) {
			sb.append(Modifier.toString(getModifiers()));
			sb.append(' ');
		}
		if (includeReturnTypeAndArgs) {
			appendType(sb, getReturnType(), useLongReturnAndArgumentTypeName);
			sb.append(' ');
		}
		appendType(sb, getDeclaringType(), useLongTypeName);
		sb.append('.');
		sb.append(getMethod().getName());
		sb.append('(');
		Class<?>[] parametersTypes = getParameterTypes();
		appendTypes(sb, parametersTypes, includeReturnTypeAndArgs, useLongReturnAndArgumentTypeName);
		sb.append(')');
		return sb.toString();
	}

	// Appends the comma-separated argument types, or ".." when args are elided.
	private void appendTypes(StringBuilder sb, Class<?>[] types, boolean includeArgs,
			boolean useLongReturnAndArgumentTypeName) {

		if (includeArgs) {
			for (int size = types.length, i = 0; i < size; i++) {
				appendType(sb, types[i], useLongReturnAndArgumentTypeName);
				if (i < size - 1) {
					sb.append(',');
				}
			}
		}
		else {
			if (types.length != 0) {
				sb.append("..");
			}
		}
	}

	// Appends a single type name, unwrapping array component types to "Foo[]" form.
	private void appendType(StringBuilder sb, Class<?> type, boolean useLongTypeName) {
		if (type.isArray()) {
			appendType(sb, type.componentType(), useLongTypeName);
			sb.append("[]");
		}
		else {
			sb.append(useLongTypeName ? type.getName() : type.getSimpleName());
		}
	}
}
/**
* Lazily initialized SourceLocation.
*/
private
|
MethodSignatureImpl
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsAction.java
|
{
"start": 1387,
"end": 1839
}
|
class ____ {
public static final ActionType<AcknowledgedResponse> INSTANCE = new ActionType<>("cluster:admin/xpack/security/settings/update");
// The names here are separate constants for 2 reasons:
// 1. Keeping the names defined here helps ensure REST compatibility, even if the internal aliases of these indices change,
// 2. The actual constants for these indices are in the security package, whereas this
|
UpdateSecuritySettingsAction
|
java
|
apache__dubbo
|
dubbo-config/dubbo-config-api/src/test/java/org/apache/dubbo/config/integration/single/SingleRegistryCenterIntegrationServiceImpl.java
|
{
"start": 1001,
"end": 1371
}
|
/**
 * Default implementation of {@link SingleRegistryCenterIntegrationService}:
 * greets the given name ({@code "Hello " + name}), logging the greeting before
 * returning it.
 */
class SingleRegistryCenterIntegrationServiceImpl implements SingleRegistryCenterIntegrationService {

    private static final Logger logger = LoggerFactory.getLogger(SingleRegistryCenterIntegrationServiceImpl.class);

    @Override
    public String hello(String name) {
        String value = "Hello " + name;
        logger.info(value);
        return value;
    }
}
|
SingleRegistryCenterIntegrationServiceImpl
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/resourcemanager/slotmanager/TestingResourceAllocationStrategy.java
|
{
"start": 1294,
"end": 3220
}
|
class ____ implements ResourceAllocationStrategy {
private final BiFunction<
Map<JobID, Collection<ResourceRequirement>>,
TaskManagerResourceInfoProvider,
ResourceAllocationResult>
tryFulfillRequirementsFunction;
private final Function<TaskManagerResourceInfoProvider, ResourceReconcileResult>
tryReleaseUnusedResourcesFunction;
private TestingResourceAllocationStrategy(
BiFunction<
Map<JobID, Collection<ResourceRequirement>>,
TaskManagerResourceInfoProvider,
ResourceAllocationResult>
tryFulfillRequirementsFunction,
Function<TaskManagerResourceInfoProvider, ResourceReconcileResult>
tryReleaseUnusedResourcesFunction) {
this.tryFulfillRequirementsFunction =
Preconditions.checkNotNull(tryFulfillRequirementsFunction);
this.tryReleaseUnusedResourcesFunction =
Preconditions.checkNotNull(tryReleaseUnusedResourcesFunction);
}
@Override
public ResourceAllocationResult tryFulfillRequirements(
Map<JobID, Collection<ResourceRequirement>> missingResources,
TaskManagerResourceInfoProvider taskManagerResourceInfoProvider,
BlockedTaskManagerChecker blockedTaskManagerChecker) {
return tryFulfillRequirementsFunction.apply(
missingResources, taskManagerResourceInfoProvider);
}
@Override
public ResourceReconcileResult tryReconcileClusterResources(
TaskManagerResourceInfoProvider taskManagerResourceInfoProvider) {
return tryReleaseUnusedResourcesFunction.apply(taskManagerResourceInfoProvider);
}
public static Builder newBuilder() {
return new Builder();
}
public static
|
TestingResourceAllocationStrategy
|
java
|
netty__netty
|
transport/src/main/java/io/netty/channel/AdaptiveRecvByteBufAllocator.java
|
{
"start": 1277,
"end": 1851
}
|
class ____ extends DefaultMaxMessagesRecvByteBufAllocator {
public static final int DEFAULT_MINIMUM = 64;
// Use an initial value that is bigger than the common MTU of 1500
public static final int DEFAULT_INITIAL = 2048;
public static final int DEFAULT_MAXIMUM = 65536;
/**
* @deprecated There is state for {@link #maxMessagesPerRead()} which is typically based upon channel type.
*/
@Deprecated
public static final AdaptiveRecvByteBufAllocator DEFAULT = new AdaptiveRecvByteBufAllocator();
private final
|
AdaptiveRecvByteBufAllocator
|
java
|
grpc__grpc-java
|
s2a/src/main/java/io/grpc/s2a/internal/handshaker/S2AConnectionException.java
|
{
"start": 812,
"end": 936
}
|
/**
 * Unchecked exception signalling a failure on the S2A connection.
 * Package-private: only handshaker-internal code is expected to throw it.
 */
class S2AConnectionException extends RuntimeException {
  S2AConnectionException(String message) {
    super(message);
  }
}
|
S2AConnectionException
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/SmooksEndpointBuilderFactory.java
|
{
"start": 2606,
"end": 6383
}
|
/**
 * Advanced builder for endpoint producers of the Smooks component.
 */
interface AdvancedSmooksEndpointBuilder
        extends
            EndpointProducerBuilder {
    default SmooksEndpointBuilder basic() {
        return (SmooksEndpointBuilder) this;
    }
    /**
     * Whether the producer should be started lazy (on the first message).
     * By starting lazy you can use this to allow CamelContext and routes to
     * startup in situations where a producer may otherwise fail during
     * starting and cause the route to fail being started. By deferring this
     * startup to be lazy then the startup failure can be handled during
     * routing messages via Camel's routing error handlers. Beware that when
     * the first message is processed then creating and starting the
     * producer may take a little time and prolong the total processing time
     * of the processing.
     *
     * The option is a: <code>boolean</code> type.
     *
     * Default: false
     * Group: producer (advanced)
     *
     * @param lazyStartProducer the value to set
     * @return the dsl builder
     */
    default AdvancedSmooksEndpointBuilder lazyStartProducer(boolean lazyStartProducer) {
        doSetProperty("lazyStartProducer", lazyStartProducer);
        return this;
    }
    /**
     * Whether the producer should be started lazy (on the first message).
     * By starting lazy you can use this to allow CamelContext and routes to
     * startup in situations where a producer may otherwise fail during
     * starting and cause the route to fail being started. By deferring this
     * startup to be lazy then the startup failure can be handled during
     * routing messages via Camel's routing error handlers. Beware that when
     * the first message is processed then creating and starting the
     * producer may take a little time and prolong the total processing time
     * of the processing.
     *
     * The option will be converted to a <code>boolean</code> type.
     *
     * Default: false
     * Group: producer (advanced)
     *
     * @param lazyStartProducer the value to set
     * @return the dsl builder
     */
    default AdvancedSmooksEndpointBuilder lazyStartProducer(String lazyStartProducer) {
        doSetProperty("lazyStartProducer", lazyStartProducer);
        return this;
    }
    /**
     * Allow execution context to be set from the
     * CamelSmooksExecutionContext header.
     *
     * The option is a: <code>java.lang.Boolean</code> type.
     *
     * Default: false
     * Group: advanced
     *
     * @param allowExecutionContextFromHeader the value to set
     * @return the dsl builder
     */
    default AdvancedSmooksEndpointBuilder allowExecutionContextFromHeader(Boolean allowExecutionContextFromHeader) {
        doSetProperty("allowExecutionContextFromHeader", allowExecutionContextFromHeader);
        return this;
    }
    /**
     * Allow execution context to be set from the
     * CamelSmooksExecutionContext header.
     *
     * The option will be converted to a <code>java.lang.Boolean</code>
     * type.
     *
     * Default: false
     * Group: advanced
     *
     * @param allowExecutionContextFromHeader the value to set
     * @return the dsl builder
     */
    default AdvancedSmooksEndpointBuilder allowExecutionContextFromHeader(String allowExecutionContextFromHeader) {
        doSetProperty("allowExecutionContextFromHeader", allowExecutionContextFromHeader);
        return this;
    }
}
public
|
AdvancedSmooksEndpointBuilder
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_2743/Issue2743Mapper.java
|
{
"start": 388,
"end": 517
}
|
interface ____ {
@BeanMapping(ignoreUnmappedSourceProperties = { "number" })
Target map(Source source);
|
Issue2743Mapper
|
java
|
junit-team__junit5
|
junit-platform-reporting/src/main/java/org/junit/platform/reporting/legacy/LegacyReportingUtils.java
|
{
"start": 1080,
"end": 1179
}
|
class ____ {
private LegacyReportingUtils() {
/* no-op */
}
/**
* Get the
|
LegacyReportingUtils
|
java
|
spring-projects__spring-security
|
data/src/test/java/org/springframework/security/data/aot/hint/AuthorizeReturnObjectDataHintsRegistrarTests.java
|
{
"start": 2862,
"end": 2987
}
|
/**
 * Test fixture whose getter is annotated with {@code @AuthorizeReturnObject},
 * so hint registration is expected for the returned {@link MySubObject} type.
 */
class MyObject {

	@AuthorizeReturnObject
	public MySubObject get() {
		return new MySubObject();
	}
}
public static
|
MyObject
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/query/sqm/tree/expression/SqmHqlNumericLiteral.java
|
{
"start": 1284,
"end": 4662
}
|
class ____<N extends Number> extends SqmLiteral<N> {
private final String literalValue;
private final NumericTypeCategory typeCategory;
private BasicDomainType<N> type;
public SqmHqlNumericLiteral(
String literalValue,
BasicDomainType<N> type,
NodeBuilder criteriaBuilder) {
this( literalValue,
interpretCategory( literalValue, castNonNull( criteriaBuilder.resolveExpressible( type ) ) ),
type, criteriaBuilder );
this.type = type;
}
public SqmHqlNumericLiteral(
String literalValue,
NumericTypeCategory typeCategory,
BasicDomainType<N> type,
NodeBuilder criteriaBuilder) {
super( criteriaBuilder.resolveExpressible( type ), criteriaBuilder );
this.literalValue = literalValue;
this.typeCategory = typeCategory;
this.type = type;
}
public String getUnparsedLiteralValue() {
return literalValue;
}
@Override
public @NonNull N getLiteralValue() {
return typeCategory.parseLiteralValue( literalValue );
}
public NumericTypeCategory getTypeCategory() {
return typeCategory;
}
@Override
public <X> X accept(SemanticQueryWalker<X> walker) {
return walker.visitHqlNumericLiteral( this );
}
@Override
public void appendHqlString(StringBuilder hql, SqmRenderContext context) {
hql.append( literalValue )
.append( switch ( typeCategory ) {
case BIG_DECIMAL -> "bd";
case FLOAT -> "f";
case BIG_INTEGER -> "bi";
case LONG -> "l";
case INTEGER, DOUBLE -> "";
} );
}
@Override
public boolean equals(@Nullable Object object) {
return object instanceof SqmHqlNumericLiteral<?> that
&& literalValue.equals( that.literalValue )
&& typeCategory.equals( that.typeCategory );
}
@Override
public int hashCode() {
int result = literalValue.hashCode();
result = 31 * result + typeCategory.hashCode();
return result;
}
@Override
public boolean isCompatible(Object object) {
return equals( object );
}
@Override
public int cacheHashCode() {
return hashCode();
}
@Override
public String asLoggableText() {
final StringBuilder stringBuilder = new StringBuilder();
appendHqlString( stringBuilder, SqmRenderContext.simpleContext() );
return stringBuilder.toString();
}
@Override
public SqmHqlNumericLiteral<N> copy(SqmCopyContext context) {
return new SqmHqlNumericLiteral<>( literalValue, typeCategory, type, nodeBuilder() );
}
private static <N extends Number> NumericTypeCategory interpretCategory(String literalValue, SqmExpressible<N> type) {
assert type != null;
final JavaType<N> javaTypeDescriptor = type.getExpressibleJavaType();
assert javaTypeDescriptor != null;
final Class<N> javaTypeClass = javaTypeDescriptor.getJavaTypeClass();
if ( BigDecimal.class.equals( javaTypeClass ) ) {
return NumericTypeCategory.BIG_DECIMAL;
}
if ( Double.class.equals( javaTypeClass ) ) {
return NumericTypeCategory.DOUBLE;
}
if ( Float.class.equals( javaTypeClass ) ) {
return NumericTypeCategory.FLOAT;
}
if ( BigInteger.class.equals( javaTypeClass ) ) {
return NumericTypeCategory.BIG_INTEGER;
}
if ( Long.class.equals( javaTypeClass ) ) {
return NumericTypeCategory.LONG;
}
if ( Short.class.equals( javaTypeClass )
|| Integer.class.equals( javaTypeClass ) ) {
return NumericTypeCategory.INTEGER;
}
throw new TypeException( literalValue, javaTypeClass );
}
public static
|
SqmHqlNumericLiteral
|
java
|
apache__camel
|
components/camel-guava-eventbus/src/generated/java/org/apache/camel/component/guava/eventbus/GuavaEventBusEndpointUriFactory.java
|
{
"start": 524,
"end": 2344
}
|
/**
 * Generated {@link EndpointUriFactory} for the "guava-eventbus" scheme: knows
 * the endpoint's property names and assembles endpoint URIs from a property map.
 */
class GuavaEventBusEndpointUriFactory extends org.apache.camel.support.component.EndpointUriFactorySupport implements EndpointUriFactory {

    // Path-parameter portion of the URI syntax: "guava-eventbus:eventBusRef".
    private static final String BASE = ":eventBusRef";

    private static final Set<String> PROPERTY_NAMES;
    private static final Set<String> SECRET_PROPERTY_NAMES;
    private static final Map<String, String> MULTI_VALUE_PREFIXES;
    static {
        Set<String> props = new HashSet<>(7);
        props.add("bridgeErrorHandler");
        props.add("eventBusRef");
        props.add("eventClass");
        props.add("exceptionHandler");
        props.add("exchangePattern");
        props.add("lazyStartProducer");
        props.add("listenerInterface");
        PROPERTY_NAMES = Collections.unmodifiableSet(props);
        SECRET_PROPERTY_NAMES = Collections.emptySet();
        MULTI_VALUE_PREFIXES = Collections.emptyMap();
    }

    @Override
    public boolean isEnabled(String scheme) {
        return "guava-eventbus".equals(scheme);
    }

    @Override
    public String buildUri(String scheme, Map<String, Object> properties, boolean encode) throws URISyntaxException {
        String syntax = scheme + BASE;
        String uri = syntax;

        // Work on a copy: buildPathParameter/buildQueryParameters consume entries.
        Map<String, Object> copy = new HashMap<>(properties);

        uri = buildPathParameter(syntax, uri, "eventBusRef", null, false, copy);
        uri = buildQueryParameters(uri, copy, encode);
        return uri;
    }

    @Override
    public Set<String> propertyNames() {
        return PROPERTY_NAMES;
    }

    @Override
    public Set<String> secretPropertyNames() {
        return SECRET_PROPERTY_NAMES;
    }

    @Override
    public Map<String, String> multiValuePrefixes() {
        return MULTI_VALUE_PREFIXES;
    }

    @Override
    public boolean isLenientProperties() {
        return false;
    }
}
|
GuavaEventBusEndpointUriFactory
|
java
|
spring-projects__spring-framework
|
spring-webflux/src/main/java/org/springframework/web/reactive/function/server/ServerRequest.java
|
{
"start": 18532,
"end": 22085
}
|
interface ____ {
/**
* Set the method of the request.
* @param method the new method
* @return this builder
*/
Builder method(HttpMethod method);
/**
* Set the URI of the request.
* @param uri the new URI
* @return this builder
*/
Builder uri(URI uri);
/**
* Set the context path of the request.
* @param contextPath the new context path
* @return this builder
* @since 5.3.23
*/
Builder contextPath(@Nullable String contextPath);
/**
* Add the given header value(s) under the given name.
* @param headerName the header name
* @param headerValues the header value(s)
* @return this builder
* @see HttpHeaders#add(String, String)
*/
Builder header(String headerName, String... headerValues);
/**
* Manipulate this request's headers with the given consumer.
* <p>The headers provided to the consumer are "live", so that the consumer can be used to
* {@linkplain HttpHeaders#set(String, String) overwrite} existing header values,
* {@linkplain HttpHeaders#remove(String) remove} values, or use any of the other
* {@link HttpHeaders} methods.
* @param headersConsumer a function that consumes the {@code HttpHeaders}
* @return this builder
*/
Builder headers(Consumer<HttpHeaders> headersConsumer);
/**
* Add a cookie with the given name and value(s).
* @param name the cookie name
* @param values the cookie value(s)
* @return this builder
*/
Builder cookie(String name, String... values);
/**
* Manipulate this request's cookies with the given consumer.
* <p>The map provided to the consumer is "live", so that the consumer can be used to
* {@linkplain MultiValueMap#set(Object, Object) overwrite} existing cookies,
* {@linkplain MultiValueMap#remove(Object) remove} cookies, or use any of the other
* {@link MultiValueMap} methods.
* @param cookiesConsumer a function that consumes the cookies map
* @return this builder
*/
Builder cookies(Consumer<MultiValueMap<String, HttpCookie>> cookiesConsumer);
/**
* Set the body of the request.
* <p>Calling this methods will
* {@linkplain org.springframework.core.io.buffer.DataBufferUtils#release(DataBuffer) release}
* the existing body of the builder.
* @param body the new body
* @return this builder
*/
Builder body(Flux<DataBuffer> body);
/**
* Set the body of the request to the UTF-8 encoded bytes of the given string.
* <p>Calling this methods will
* {@linkplain org.springframework.core.io.buffer.DataBufferUtils#release(DataBuffer) release}
* the existing body of the builder.
* @param body the new body
* @return this builder
*/
Builder body(String body);
/**
* Add an attribute with the given name and value.
* @param name the attribute name
* @param value the attribute value
* @return this builder
*/
Builder attribute(String name, Object value);
/**
* Manipulate this request's attributes with the given consumer.
* <p>The map provided to the consumer is "live", so that the consumer can be used
* to {@linkplain Map#put(Object, Object) overwrite} existing attributes,
* {@linkplain Map#remove(Object) remove} attributes, or use any of the other
* {@link Map} methods.
* @param attributesConsumer a function that consumes the attributes map
* @return this builder
*/
Builder attributes(Consumer<Map<String, Object>> attributesConsumer);
/**
* Build the request.
* @return the built request
*/
ServerRequest build();
}
}
|
Builder
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/internal/objects/data/AlwaysEqualAddress.java
|
{
"start": 673,
"end": 787
}
|
/**
 * Test fixture: an {@link Address} whose {@code equals} reports {@code true}
 * for any argument (including {@code null}), presumably to verify that
 * recursive field-by-field comparison ignores a custom {@code equals}.
 * NOTE(review): {@code hashCode} is intentionally not overridden, breaking the
 * equals/hashCode contract — fine for a fixture, but never use this type as a
 * key in hash-based collections.
 */
class AlwaysEqualAddress extends Address {

  @Override
  public boolean equals(Object o) {
    return true;
  }
}
|
AlwaysEqualAddress
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/KubernetesPodsEndpointBuilderFactory.java
|
{
"start": 46695,
"end": 48175
}
|
/**
 * Advanced builder for endpoints of the Kubernetes Pods component, covering
 * both consumer and producer advanced options.
 */
interface AdvancedKubernetesPodsEndpointBuilder
        extends
            AdvancedKubernetesPodsEndpointConsumerBuilder,
            AdvancedKubernetesPodsEndpointProducerBuilder {
    default KubernetesPodsEndpointBuilder basic() {
        return (KubernetesPodsEndpointBuilder) this;
    }
    /**
     * Connection timeout in milliseconds to use when making requests to the
     * Kubernetes API server.
     *
     * The option is a: <code>java.lang.Integer</code> type.
     *
     * Group: advanced
     *
     * @param connectionTimeout the value to set
     * @return the dsl builder
     */
    default AdvancedKubernetesPodsEndpointBuilder connectionTimeout(Integer connectionTimeout) {
        doSetProperty("connectionTimeout", connectionTimeout);
        return this;
    }
    /**
     * Connection timeout in milliseconds to use when making requests to the
     * Kubernetes API server.
     *
     * The option will be converted to a <code>java.lang.Integer</code>
     * type.
     *
     * Group: advanced
     *
     * @param connectionTimeout the value to set
     * @return the dsl builder
     */
    default AdvancedKubernetesPodsEndpointBuilder connectionTimeout(String connectionTimeout) {
        doSetProperty("connectionTimeout", connectionTimeout);
        return this;
    }
}
public
|
AdvancedKubernetesPodsEndpointBuilder
|
java
|
dropwizard__dropwizard
|
dropwizard-jersey/src/main/java/io/dropwizard/jersey/optional/EmptyOptionalExceptionMapper.java
|
{
"start": 242,
"end": 486
}
|
/**
 * Maps {@link EmptyOptionalException} to a {@code 404 Not Found} response, so a
 * resource method returning an empty {@code Optional} surfaces as a missing entity.
 */
class EmptyOptionalExceptionMapper implements ExceptionMapper<EmptyOptionalException> {

    @Override
    public Response toResponse(EmptyOptionalException exception) {
        return Response.status(Response.Status.NOT_FOUND).build();
    }
}
|
EmptyOptionalExceptionMapper
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/deser/jdk/JDKStringLikeTypeDeserTest.java
|
{
"start": 906,
"end": 1209
}
|
/**
 * Simple bean exposing a {@link Class}-typed property for String-like type
 * deserialization tests. The no-arg constructor leaves {@code clazz} null and
 * {@code name} at its default ("bar"); the String constructor sets both.
 */
class ParamClassBean
{
    public String name = "bar";

    public Class<String> clazz;

    public ParamClassBean() { }

    public ParamClassBean(String name) {
        this.name = name;
        clazz = String.class;
    }
}
// [databind#429]
static
|
ParamClassBean
|
java
|
micronaut-projects__micronaut-core
|
core-processor/src/main/java/io/micronaut/inject/processing/ProcessingException.java
|
{
"start": 892,
"end": 1524
}
|
class ____ extends RuntimeException {
private final transient Element originatingElement;
public ProcessingException(Element element, String message) {
super(message);
this.originatingElement = element;
}
public ProcessingException(Element originatingElement, String message, Throwable cause) {
super(message, cause);
this.originatingElement = originatingElement;
}
@Nullable
public Object getOriginatingElement() {
if (originatingElement != null) {
return originatingElement.getNativeType();
}
return null;
}
}
|
ProcessingException
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/jmx/access/ConnectorDelegate.java
|
{
"start": 1257,
"end": 2864
}
|
/**
 * Internal helper that manages a {@link JMXConnector}: connects to a remote
 * {@code MBeanServer} (or locates a local one when no service URL is given)
 * and closes any opened connector on {@link #close()}.
 */
class ConnectorDelegate {

	private static final Log logger = LogFactory.getLog(ConnectorDelegate.class);

	// Set only when a remote connection was established; null for local lookups.
	private @Nullable JMXConnector connector;

	/**
	 * Connects to the remote {@code MBeanServer} using the configured {@code JMXServiceURL}:
	 * to the specified JMX service, or to a local MBeanServer if no service URL specified.
	 * @param serviceUrl the JMX service URL to connect to (may be {@code null})
	 * @param environment the JMX environment for the connector (may be {@code null})
	 * @param agentId the local JMX MBeanServer's agent id (may be {@code null})
	 * @return a connection to the located server
	 * @throws MBeanServerNotFoundException if the remote server could not be reached
	 */
	public MBeanServerConnection connect(@Nullable JMXServiceURL serviceUrl, @Nullable Map<String, ?> environment, @Nullable String agentId)
			throws MBeanServerNotFoundException {

		if (serviceUrl != null) {
			if (logger.isDebugEnabled()) {
				logger.debug("Connecting to remote MBeanServer at URL [" + serviceUrl + "]");
			}
			try {
				this.connector = JMXConnectorFactory.connect(serviceUrl, environment);
				return this.connector.getMBeanServerConnection();
			}
			catch (IOException ex) {
				throw new MBeanServerNotFoundException("Could not connect to remote MBeanServer [" + serviceUrl + "]", ex);
			}
		}
		else {
			logger.debug("Attempting to locate local MBeanServer");
			return JmxUtils.locateMBeanServer(agentId);
		}
	}

	/**
	 * Closes any {@code JMXConnector} that may be managed by this interceptor.
	 */
	public void close() {
		if (this.connector != null) {
			try {
				this.connector.close();
			}
			catch (IOException ex) {
				// Best-effort shutdown: log and continue rather than propagate.
				logger.debug("Could not close JMX connector", ex);
			}
		}
	}
}
|
ConnectorDelegate
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/collect/ReserializedImmutableSortedMapMapInterfaceTest.java
|
{
"start": 880,
"end": 1353
}
|
/**
 * Runs the SortedMap interface conformance suite against an
 * {@code ImmutableSortedMap} that has gone through a serialize/deserialize
 * round trip, verifying reserialization preserves map behavior.
 */
class ReserializedImmutableSortedMapMapInterfaceTest
    extends AbstractImmutableSortedMapMapInterfaceTest<String, Integer> {

  @Override
  protected SortedMap<String, Integer> makePopulatedMap() {
    return SerializableTester.reserialize(ImmutableSortedMap.of("one", 1, "two", 2, "three", 3));
  }

  @Override
  protected String getKeyNotInPopulatedMap() {
    return "minus one";
  }

  @Override
  protected Integer getValueNotInPopulatedMap() {
    return -1;
  }
}
|
ReserializedImmutableSortedMapMapInterfaceTest
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSource.java
|
{
"start": 927,
"end": 1035
}
|
class ____<VS extends ValuesSource> {
protected Map<String, VS> values;
public static
|
MultiValuesSource
|
java
|
elastic__elasticsearch
|
x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/rest/action/RestMonitoringBulkAction.java
|
{
"start": 1507,
"end": 5910
}
|
class ____ extends BaseRestHandler {
public static final String MONITORING_ID = "system_id";
public static final String MONITORING_VERSION = "system_api_version";
public static final String INTERVAL = "interval";
private static final List<String> ALL_VERSIONS = asList(
MonitoringTemplateUtils.TEMPLATE_VERSION,
MonitoringTemplateUtils.OLD_TEMPLATE_VERSION
);
private static final Map<MonitoredSystem, List<String>> SUPPORTED_API_VERSIONS = Map.of(
MonitoredSystem.KIBANA,
ALL_VERSIONS,
MonitoredSystem.LOGSTASH,
ALL_VERSIONS,
MonitoredSystem.BEATS,
ALL_VERSIONS
);
@Override
public List<Route> routes() {
return List.of(new Route(POST, "/_monitoring/bulk"), new Route(PUT, "/_monitoring/bulk"));
}
@Override
public String getName() {
return "monitoring_bulk";
}
@Override
public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
final String id = request.param(MONITORING_ID);
if (Strings.isEmpty(id)) {
throw new IllegalArgumentException("no [" + MONITORING_ID + "] for monitoring bulk request");
}
final String version = request.param(MONITORING_VERSION);
if (Strings.isEmpty(version)) {
throw new IllegalArgumentException("no [" + MONITORING_VERSION + "] for monitoring bulk request");
}
final String intervalAsString = request.param(INTERVAL);
if (Strings.isEmpty(intervalAsString)) {
throw new IllegalArgumentException("no [" + INTERVAL + "] for monitoring bulk request");
}
if (false == request.hasContentOrSourceParam()) {
throw new ElasticsearchParseException("no body content for monitoring bulk request");
}
final MonitoredSystem system = MonitoredSystem.fromSystem(id);
if (isSupportedSystemVersion(system, version) == false) {
throw new IllegalArgumentException(
MONITORING_VERSION + " [" + version + "] is not supported by " + MONITORING_ID + " [" + id + "]"
);
}
final long timestamp = System.currentTimeMillis();
final long intervalMillis = parseTimeValue(intervalAsString, INTERVAL).getMillis();
final MonitoringBulkRequestBuilder requestBuilder = new MonitoringBulkRequestBuilder(client);
var content = request.content();
requestBuilder.add(system, content, request.getXContentType(), timestamp, intervalMillis);
return channel -> requestBuilder.execute(ActionListener.withRef(getRestBuilderListener(channel), content));
}
@Override
public boolean mediaTypesValid(RestRequest request) {
return super.mediaTypesValid(request) && XContentType.supportsDelimitedBulkRequests(request.getXContentType());
}
/**
* Indicate if the given {@link MonitoredSystem} and system api version pair is supported by
* the Monitoring Bulk API.
*
* @param system the {@link MonitoredSystem}
* @param version the system API version
* @return true if supported, false otherwise
*/
private static boolean isSupportedSystemVersion(final MonitoredSystem system, final String version) {
final List<String> monitoredSystem = SUPPORTED_API_VERSIONS.getOrDefault(system, emptyList());
return monitoredSystem.contains(version);
}
static RestBuilderListener<MonitoringBulkResponse> getRestBuilderListener(RestChannel channel) {
return new RestBuilderListener<>(channel) {
@Override
public RestResponse buildResponse(MonitoringBulkResponse response, XContentBuilder builder) throws Exception {
builder.startObject();
{
builder.field("took", response.getTookInMillis());
builder.field("ignored", response.isIgnored());
final MonitoringBulkResponse.Error error = response.getError();
builder.field("errors", error != null);
if (error != null) {
builder.field("error", response.getError());
}
}
builder.endObject();
return new RestResponse(response.status(), builder);
}
};
}
}
|
RestMonitoringBulkAction
|
java
|
quarkusio__quarkus
|
extensions/smallrye-metrics/runtime/src/main/java/io/quarkus/smallrye/metrics/runtime/MicrometerGCMetrics.java
|
{
"start": 2604,
"end": 10720
}
|
class ____ {
private String cause;
private String action;
public CauseAndActionWrapper(String cause, String action) {
this.cause = cause;
this.action = action;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
CauseAndActionWrapper that = (CauseAndActionWrapper) o;
return Objects.equals(cause, that.cause) &&
Objects.equals(action, that.action);
}
@Override
public int hashCode() {
return Objects.hash(cause, action);
}
}
// keeps track of maximum gc pause lengths for a given GC cause and action
private Map<CauseAndActionWrapper, AtomicLong> gcPauseMax = new HashMap<>();
// and the same for concurrent GC phases
private Map<CauseAndActionWrapper, AtomicLong> gcPauseMaxConcurrent = new HashMap<>();
// To keep track of notification listeners that we register so we can clean them up later
private Map<NotificationEmitter, NotificationListener> notificationEmitters = new HashMap<>();
public Long getLiveDataSize() {
return liveDataSize.get();
}
public Long getMaxDataSize() {
return maxDataSize.get();
}
public Long getPromotedBytes() {
return promotedBytes.get();
}
public Long getAllocatedBytes() {
return allocatedBytes.get();
}
public void startWatchingNotifications() {
final AtomicLong youngGenSizeAfter = new AtomicLong(0L);
for (GarbageCollectorMXBean mbean : ManagementFactory.getGarbageCollectorMXBeans()) {
if (!(mbean instanceof NotificationEmitter)) {
continue;
}
NotificationListener notificationListener = (notification, ref) -> {
if (!notification.getType().equals(GarbageCollectionNotificationInfo.GARBAGE_COLLECTION_NOTIFICATION)) {
return;
}
CompositeData cd = (CompositeData) notification.getUserData();
GarbageCollectionNotificationInfo notificationInfo = GarbageCollectionNotificationInfo.from(cd);
String gcCause = notificationInfo.getGcCause();
String gcAction = notificationInfo.getGcAction();
GcInfo gcInfo = notificationInfo.getGcInfo();
long duration = gcInfo.getDuration();
MetricRegistry registry = MetricRegistries.get(MetricRegistry.Type.BASE);
String metricName = isConcurrentPhase(gcCause) ? "jvm.gc.concurrent.phase.time" : "jvm.gc.pause";
Map<CauseAndActionWrapper, AtomicLong> mapForStoringMax = isConcurrentPhase(gcCause) ? gcPauseMax
: gcPauseMaxConcurrent;
Tag[] tags = new Tag[] { new Tag("action", gcAction), new Tag("cause", gcCause) };
CauseAndActionWrapper causeAndAction = new CauseAndActionWrapper(gcCause, gcAction);
MetricID pauseSecondsMaxMetricID = new MetricID(metricName + ".seconds.max", tags);
AtomicLong gcPauseMaxValue = mapForStoringMax.computeIfAbsent(causeAndAction, (k) -> new AtomicLong(0));
if (duration > gcPauseMaxValue.get()) {
gcPauseMaxValue.set(duration); // update the maximum GC length if needed
}
if (!registry.getGauges().containsKey(pauseSecondsMaxMetricID)) {
registry.register(new ExtendedMetadataBuilder()
.withName(metricName + ".seconds.max")
.withType(MetricType.GAUGE)
.withUnit(MetricUnits.NONE)
.withDescription("Time spent in GC pause")
.skipsScopeInOpenMetricsExportCompletely(true)
.build(),
new Gauge() {
@Override
public Number getValue() {
return mapForStoringMax.get(causeAndAction).doubleValue() / 1000.0;
}
}, tags);
}
ExtendedMetadata countMetadata = new ExtendedMetadataBuilder()
.withName(metricName + ".seconds.count")
.withType(MetricType.COUNTER)
.withUnit(MetricUnits.NONE)
.withDescription("Time spent in GC pause")
.skipsScopeInOpenMetricsExportCompletely(true)
.withOpenMetricsKeyOverride(metricName.replace(".", "_") + "_seconds_count")
.build();
registry.counter(countMetadata, tags).inc();
registry.counter(new ExtendedMetadataBuilder()
.withName(metricName + ".seconds.sum")
.withType(MetricType.COUNTER)
.withUnit(MetricUnits.MILLISECONDS)
.withDescription("Time spent in GC pause")
.skipsScopeInOpenMetricsExportCompletely(true)
.withOpenMetricsKeyOverride(metricName.replace(".", "_") + "_seconds_sum")
.build(), tags).inc(duration);
// Update promotion and allocation counters
final Map<String, MemoryUsage> before = gcInfo.getMemoryUsageBeforeGc();
final Map<String, MemoryUsage> after = gcInfo.getMemoryUsageAfterGc();
if (oldGenPoolName != null) {
final long oldBefore = before.get(oldGenPoolName).getUsed();
final long oldAfter = after.get(oldGenPoolName).getUsed();
final long delta = oldAfter - oldBefore;
if (delta > 0L) {
promotedBytes.addAndGet(delta);
}
// Some GC implementations such as G1 can reduce the old gen size as part of a minor GC. To track the
// live data size we record the value if we see a reduction in the old gen heap size or
// after a major GC.
if (oldAfter < oldBefore || GcGenerationAge.fromName(notificationInfo.getGcName()) == GcGenerationAge.OLD) {
liveDataSize.set(oldAfter);
final long oldMaxAfter = after.get(oldGenPoolName).getMax();
maxDataSize.set(oldMaxAfter);
}
}
if (youngGenPoolName != null) {
final long youngBefore = before.get(youngGenPoolName).getUsed();
final long youngAfter = after.get(youngGenPoolName).getUsed();
final long delta = youngBefore - youngGenSizeAfter.get();
youngGenSizeAfter.set(youngAfter);
if (delta > 0L) {
allocatedBytes.addAndGet(delta);
}
}
};
NotificationEmitter notificationEmitter = (NotificationEmitter) mbean;
notificationEmitter.addNotificationListener(notificationListener, null, null);
notificationEmitters.put(notificationEmitter, notificationListener);
}
}
public void cleanUp() {
notificationEmitters.forEach((emitter, listener) -> {
try {
emitter.removeNotificationListener(listener);
} catch (ListenerNotFoundException e) {
}
});
}
private boolean isYoungGenPool(String name) {
return name.endsWith("Eden Space");
}
private boolean isOldGenPool(String name) {
return name.endsWith("Old Gen") || name.endsWith("Tenured Gen");
}
private boolean isConcurrentPhase(String cause) {
return "No GC".equals(cause);
}
|
CauseAndActionWrapper
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/ConditionalExpressionNumericPromotionTest.java
|
{
"start": 892,
"end": 1296
}
|
class ____ {
private final BugCheckerRefactoringTestHelper testHelper =
BugCheckerRefactoringTestHelper.newInstance(
ConditionalExpressionNumericPromotion.class, getClass());
@Test
public void positive() {
testHelper
.addInputLines(
"in/Test.java",
"""
import java.io.Serializable;
|
ConditionalExpressionNumericPromotionTest
|
java
|
quarkusio__quarkus
|
integration-tests/logging-panache/src/test/java/io/quarkus/logging/LoggingBean.java
|
{
"start": 3361,
"end": 3998
}
|
class ____<T, U, V> {
private record Item<T, U, V>(T t, U u, V v) {
}
private final List<Item<T, U, V>> list;
static <T, U, V> TriStream<T, U, V> of(T t1, U u1, V v1) {
List<Item<T, U, V>> list = new ArrayList<>();
list.add(new Item<>(t1, u1, v1));
return new TriStream<>(list);
}
private TriStream(List<Item<T, U, V>> list) {
this.list = list;
}
void forEach(TriConsumer<T, U, V> action) {
list.forEach(item -> action.accept(item.t(), item.u(), item.v()));
}
}
@FunctionalInterface
|
TriStream
|
java
|
playframework__playframework
|
documentation/manual/working/javaGuide/main/http/code/javaguide/http/JavaActionsComposition.java
|
{
"start": 1049,
"end": 1772
}
|
class ____ extends play.mvc.Action.Simple {
public CompletionStage<Result> call(Http.Request req) {
log.info("Calling action for {}", req);
return delegate.call(req);
}
}
// #verbose-action
// #verbose-index
@With(VerboseAction.class)
public Result verboseIndex() {
return ok("It works!");
}
// #verbose-index
// #authenticated-cached-index
@Security.Authenticated
@Cached(key = "index.result")
public Result authenticatedCachedIndex() {
return ok("It works!");
}
// #authenticated-cached-index
// #verbose-annotation
@With(VerboseAnnotationAction.class)
@Target({ElementType.TYPE, ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
public @
|
VerboseAction
|
java
|
spring-projects__spring-boot
|
module/spring-boot-jdbc/src/main/java/org/springframework/boot/jdbc/autoconfigure/DataSourceProperties.java
|
{
"start": 10389,
"end": 10942
}
|
class ____ extends BeanCreationException {
private final DataSourceProperties properties;
private final EmbeddedDatabaseConnection connection;
DataSourceBeanCreationException(String message, DataSourceProperties properties,
EmbeddedDatabaseConnection connection) {
super(message);
this.properties = properties;
this.connection = connection;
}
DataSourceProperties getProperties() {
return this.properties;
}
EmbeddedDatabaseConnection getConnection() {
return this.connection;
}
}
}
|
DataSourceBeanCreationException
|
java
|
netty__netty
|
transport/src/main/java/io/netty/channel/SingleThreadEventLoop.java
|
{
"start": 8214,
"end": 9497
}
|
class ____<T extends Channel> implements Iterator<Channel> {
private final Iterator<T> channelIterator;
public ChannelsReadOnlyIterator(Iterable<T> channelIterable) {
this.channelIterator =
ObjectUtil.checkNotNull(channelIterable, "channelIterable").iterator();
}
@Override
public boolean hasNext() {
return channelIterator.hasNext();
}
@Override
public Channel next() {
return channelIterator.next();
}
@Override
public void remove() {
throw new UnsupportedOperationException("remove");
}
@SuppressWarnings("unchecked")
public static <T> Iterator<T> empty() {
return (Iterator<T>) EMPTY;
}
private static final Iterator<Object> EMPTY = new Iterator<Object>() {
@Override
public boolean hasNext() {
return false;
}
@Override
public Object next() {
throw new NoSuchElementException();
}
@Override
public void remove() {
throw new UnsupportedOperationException("remove");
}
};
}
}
|
ChannelsReadOnlyIterator
|
java
|
apache__rocketmq
|
store/src/main/java/org/apache/rocketmq/store/config/BrokerRole.java
|
{
"start": 852,
"end": 918
}
|
enum ____ {
ASYNC_MASTER,
SYNC_MASTER,
SLAVE;
}
|
BrokerRole
|
java
|
spring-projects__spring-framework
|
spring-web/src/jmh/java/org/springframework/http/support/HeadersAdaptersBaseline.java
|
{
"start": 13115,
"end": 16567
}
|
class ____ implements MultiValueMap<String, String> {
private final io.netty.handler.codec.http.HttpHeaders headers;
/**
* Creates a new {@code Netty4HeadersAdapter} based on the given
* {@code HttpHeaders}.
*/
public Netty4(io.netty.handler.codec.http.HttpHeaders headers) {
Assert.notNull(headers, "Headers must not be null");
this.headers = headers;
}
@Override
public @Nullable String getFirst(String key) {
return this.headers.get(key);
}
@Override
public void add(String key, @Nullable String value) {
if (value != null) {
this.headers.add(key, value);
}
}
@Override
public void addAll(String key, List<? extends @Nullable String> values) {
this.headers.add(key, values);
}
@Override
public void addAll(MultiValueMap<String, @Nullable String> values) {
values.forEach(this.headers::add);
}
@Override
public void set(String key, @Nullable String value) {
if (value != null) {
this.headers.set(key, value);
}
}
@Override
public void setAll(Map<String, @Nullable String> values) {
values.forEach(this.headers::set);
}
@Override
public Map<String, String> toSingleValueMap() {
Map<String, String> singleValueMap = CollectionUtils.newLinkedHashMap(this.headers.size());
this.headers.entries()
.forEach(entry -> {
if (!singleValueMap.containsKey(entry.getKey())) {
singleValueMap.put(entry.getKey(), entry.getValue());
}
});
return singleValueMap;
}
@Override
public int size() {
return this.headers.names().size();
}
@Override
public boolean isEmpty() {
return this.headers.isEmpty();
}
@Override
public boolean containsKey(Object key) {
return (key instanceof String headerName && this.headers.contains(headerName));
}
@Override
public boolean containsValue(Object value) {
return (value instanceof String &&
this.headers.entries().stream()
.anyMatch(entry -> value.equals(entry.getValue())));
}
@Override
public @Nullable List<String> get(Object key) {
if (containsKey(key)) {
return this.headers.getAll((String) key);
}
return null;
}
@Override
public @Nullable List<String> put(String key, @Nullable List<String> value) {
List<String> previousValues = this.headers.getAll(key);
this.headers.set(key, value);
return previousValues;
}
@Override
public @Nullable List<String> remove(Object key) {
if (key instanceof String headerName) {
List<String> previousValues = this.headers.getAll(headerName);
this.headers.remove(headerName);
return previousValues;
}
return null;
}
@Override
public void putAll(Map<? extends String, ? extends List<String>> map) {
map.forEach(this.headers::set);
}
@Override
public void clear() {
this.headers.clear();
}
@Override
public Set<String> keySet() {
return new HeaderNames();
}
@Override
public Collection<List<String>> values() {
return this.headers.names().stream()
.map(this.headers::getAll).toList();
}
@Override
public Set<Entry<String, List<String>>> entrySet() {
return new AbstractSet<>() {
@Override
public Iterator<Entry<String, List<String>>> iterator() {
return new EntryIterator();
}
@Override
public int size() {
return headers.size();
}
};
}
@Override
public String toString() {
return HttpHeaders.formatHeaders(this);
}
private
|
Netty4
|
java
|
bumptech__glide
|
library/src/main/java/com/bumptech/glide/load/model/stream/HttpGlideUrlLoader.java
|
{
"start": 2177,
"end": 2583
}
|
class ____ implements ModelLoaderFactory<GlideUrl, InputStream> {
private final ModelCache<GlideUrl, GlideUrl> modelCache = new ModelCache<>(500);
@NonNull
@Override
public ModelLoader<GlideUrl, InputStream> build(MultiModelLoaderFactory multiFactory) {
return new HttpGlideUrlLoader(modelCache);
}
@Override
public void teardown() {
// Do nothing.
}
}
}
|
Factory
|
java
|
google__truth
|
core/src/main/java/com/google/common/truth/IterableSubject.java
|
{
"start": 42755,
"end": 94299
}
|
class ____<A extends @Nullable Object, E extends @Nullable Object> {
private final IterableSubject subject;
private final Correspondence<? super A, ? super E> correspondence;
private final @Nullable Pairer<A, E> pairer;
private final @Nullable Iterable<?> actual;
UsingCorrespondence(
IterableSubject subject, Correspondence<? super A, ? super E> correspondence) {
this.subject = checkNotNull(subject);
this.correspondence = checkNotNull(correspondence);
this.pairer = null;
this.actual = subject.actual;
}
private UsingCorrespondence(
IterableSubject subject,
Correspondence<? super A, ? super E> correspondence,
Pairer<A, E> pairer) {
this.subject = checkNotNull(subject);
this.correspondence = checkNotNull(correspondence);
this.pairer = pairer;
this.actual = subject.actual;
}
/**
* @throws UnsupportedOperationException always
* @deprecated {@link Object#equals(Object)} is not supported on Truth subjects or intermediate
* classes. If you are writing a test assertion (actual vs. expected), use methods liks
* {@link #containsExactlyElementsIn(Iterable)} instead.
*/
@DoNotCall(
"UsingCorrespondence.equals() is not supported. Did you mean to call"
+ " containsExactlyElementsIn(expected) instead of equals(expected)?")
@Deprecated
@Override
public final boolean equals(@Nullable Object other) {
throw new UnsupportedOperationException(
"UsingCorrespondence.equals() is not supported. Did you mean to call"
+ " containsExactlyElementsIn(expected) instead of equals(expected)?");
}
/**
* @throws UnsupportedOperationException always
* @deprecated {@link Object#hashCode()} is not supported on Truth types.
*/
@DoNotCall("UsingCorrespondence.hashCode() is not supported.")
@Deprecated
@Override
public final int hashCode() {
throw new UnsupportedOperationException("UsingCorrespondence.hashCode() is not supported.");
}
/**
* @throws UnsupportedOperationException always
* @deprecated {@link Object#toString()} is not supported on Truth subjects.
*/
@Deprecated
@DoNotCall("UsingCorrespondence.toString() is not supported.")
@Override
public final String toString() {
throw new UnsupportedOperationException(
"UsingCorrespondence.toString() is not supported. Did you mean to call"
+ " assertThat(foo.toString()) instead of assertThat(foo).toString()?");
}
/**
* Specifies a way to pair up unexpected and missing elements in the message when an assertion
* fails. For example:
*
* <pre>{@code
* assertThat(actualRecords)
* .comparingElementsUsing(RECORD_CORRESPONDENCE)
* .displayingDiffsPairedBy(MyRecord::getId)
* .containsExactlyElementsIn(expectedRecords);
* }</pre>
*
* <p><b>Important</b>: The {code keyFunction} function must be able to accept both the actual
* and the unexpected elements, i.e. it must satisfy {@code Function<? super A, ?>} as well as
* {@code Function<? super E, ?>}. If that constraint is not met then a subsequent method may
* throw {@link ClassCastException}. Use the two-parameter overload if you need to specify
* different key functions for the actual and expected elements.
*
* <p>On assertions where it makes sense to do so, the elements are paired as follows: they are
* keyed by {@code keyFunction}, and if an unexpected element and a missing element have the
* same non-null key then they are paired up. (Elements with null keys are not paired.) The
* failure message will show paired elements together, and a diff will be shown if the {@link
* Correspondence#formatDiff} method returns non-null.
*
* <p>The expected elements given in the assertion should be uniquely keyed by {@code
* keyFunction}. If multiple missing elements have the same key then the pairing will be
* skipped.
*
* <p>Useful key functions will have the property that key equality is less strict than the
* correspondence, i.e. given {@code actual} and {@code expected} values with keys {@code
* actualKey} and {@code expectedKey}, if {@code correspondence.compare(actual, expected)} is
* true then it is guaranteed that {@code actualKey} is equal to {@code expectedKey}, but there
* are cases where {@code actualKey} is equal to {@code expectedKey} but {@code
* correspondence.compare(actual, expected)} is false.
*
* <p>If the {@code apply} method on the key function throws an exception then the element will
* be treated as if it had a null key and not paired. (The first such exception will be noted in
* the failure message.)
*
* <p>Note that calling this method makes no difference to whether a test passes or fails, it
* just improves the message if it fails.
*/
public UsingCorrespondence<A, E> displayingDiffsPairedBy(Function<? super E, ?> keyFunction) {
@SuppressWarnings("unchecked") // throwing ClassCastException is the correct behaviour
Function<? super A, ?> actualKeyFunction = (Function<? super A, ?>) keyFunction;
return displayingDiffsPairedBy(actualKeyFunction, keyFunction);
}
/**
* Specifies a way to pair up unexpected and missing elements in the message when an assertion
* fails. For example:
*
* <pre>{@code
* assertThat(actualFoos)
* .comparingElementsUsing(FOO_BAR_CORRESPONDENCE)
* .displayingDiffsPairedBy(Foo::getId, Bar::getFooId)
* .containsExactlyElementsIn(expectedBar);
* }</pre>
*
* <p>On assertions where it makes sense to do so, the elements are paired as follows: the
* unexpected elements are keyed by {@code actualKeyFunction}, the missing elements are keyed by
* {@code expectedKeyFunction}, and if an unexpected element and a missing element have the same
* non-null key then they are paired up. (Elements with null keys are not paired.) The failure
* message will show paired elements together, and a diff will be shown if the {@link
* Correspondence#formatDiff} method returns non-null.
*
* <p>The expected elements given in the assertion should be uniquely keyed by {@code
* expectedKeyFunction}. If multiple missing elements have the same key then the pairing will be
* skipped.
*
* <p>Useful key functions will have the property that key equality is less strict than the
* correspondence, i.e. given {@code actual} and {@code expected} values with keys {@code
* actualKey} and {@code expectedKey}, if {@code correspondence.compare(actual, expected)} is
* true then it is guaranteed that {@code actualKey} is equal to {@code expectedKey}, but there
* are cases where {@code actualKey} is equal to {@code expectedKey} but {@code
* correspondence.compare(actual, expected)} is false.
*
* <p>If the {@code apply} method on either of the key functions throws an exception then the
* element will be treated as if it had a null key and not paired. (The first such exception
* will be noted in the failure message.)
*
* <p>Note that calling this method makes no difference to whether a test passes or fails, it
* just improves the message if it fails.
*/
public UsingCorrespondence<A, E> displayingDiffsPairedBy(
Function<? super A, ?> actualKeyFunction, Function<? super E, ?> expectedKeyFunction) {
return new UsingCorrespondence<>(
subject, correspondence, Pairer.create(actualKeyFunction, expectedKeyFunction));
}
/**
* Checks that the actual iterable contains at least one element that corresponds to the given
* expected element.
*/
/*
* TODO(cpovirk): Do we want @Nullable on usages of E? Probably not, since it could throw errors
* during comparisons? Or maybe we should take the risk for user convenience? If we make
* changes, also make them in MapSubject, MultimapSubject, and possibly others.
*/
public void contains(E expected) {
if (actual == null) {
failWithActual(
factsBuilder()
.add(fact("expected an iterable that contains", expected))
.addAll(correspondence.describeForIterable())
.build());
return;
}
Correspondence.ExceptionStore exceptions = Correspondence.ExceptionStore.forIterable();
for (A actual : castActual(actual)) {
if (correspondence.safeCompare(actual, expected, exceptions)) {
// Found a match, but we still need to fail if we hit an exception along the way.
if (exceptions.hasCompareException()) {
failWithoutActual(
factsBuilder()
.addAll(exceptions.describeAsMainCause())
.add(fact("expected to contain", expected))
.addAll(correspondence.describeForIterable())
.add(fact("found match (but failing because of exception)", actual))
.add(fullContents())
.build());
}
return;
}
}
// Found no match. Fail, reporting elements that have the correct key if there are any.
if (pairer != null) {
List<A> keyMatches = pairer.pairOne(expected, castActual(actual), exceptions);
if (!keyMatches.isEmpty()) {
failWithoutActual(
factsBuilder()
.add(fact("expected to contain", expected))
.addAll(correspondence.describeForIterable())
.add(simpleFact("but did not"))
.addAll(
formatExtras(
"though it did contain elements with correct key",
expected,
keyMatches,
exceptions))
.add(simpleFact("---"))
.add(fullContents())
.addAll(exceptions.describeAsAdditionalInfo())
.build());
return;
}
}
failWithoutActual(
factsBuilder()
.add(fact("expected to contain", expected))
.addAll(correspondence.describeForIterable())
.add(butWas())
.addAll(exceptions.describeAsAdditionalInfo())
.build());
}
/** Checks that none of the actual elements correspond to the given element. */
public void doesNotContain(E element) {
if (actual == null) {
failWithActual(
factsBuilder()
.add(fact("expected an iterable that does not contain", element))
.addAll(correspondence.describeForIterable())
.build());
return;
}
Correspondence.ExceptionStore exceptions = Correspondence.ExceptionStore.forIterable();
List<A> matchingElements = new ArrayList<>();
for (A actual : castActual(actual)) {
if (correspondence.safeCompare(actual, element, exceptions)) {
matchingElements.add(actual);
}
}
// Fail if we found any matches.
if (!matchingElements.isEmpty()) {
failWithoutActual(
factsBuilder()
.add(fact("expected not to contain", element))
.addAll(correspondence.describeForIterable())
.add(fact("but contained", countDuplicates(matchingElements)))
.add(fullContents())
.addAll(exceptions.describeAsAdditionalInfo())
.build());
return;
}
// Found no match, but we still need to fail if we hit an exception along the way.
if (exceptions.hasCompareException()) {
failWithoutActual(
factsBuilder()
.addAll(exceptions.describeAsMainCause())
.add(fact("expected not to contain", element))
.addAll(correspondence.describeForIterable())
.add(simpleFact("found no match (but failing because of exception)"))
.add(fullContents())
.build());
}
}
/**
* Checks that actual iterable contains exactly elements that correspond to the expected
* elements, i.e. that there is a 1:1 mapping between the actual elements and the expected
* elements where each pair of elements correspond.
*
* <p>To also test that the contents appear in the given order, make a call to {@code inOrder()}
* on the object returned by this method.
*
* <p>To test that the iterable contains the elements corresponding to those in an array, prefer
* {@link #containsExactlyElementsIn(Object[])}. It makes clear that the given array is a list
* of elements, not an element itself. This helps human readers and avoids a compiler warning.
*/
@SafeVarargs
@CanIgnoreReturnValue
public final Ordered containsExactly(@Nullable E @Nullable ... expected) {
return containsExactlyElementsIn(SubjectUtils.<E>listifyNullableVarargs(expected));
}
/**
* Checks that actual iterable contains exactly elements that correspond to the expected
* elements, i.e. that there is a 1:1 mapping between the actual elements and the expected
* elements where each pair of elements correspond.
*
* <p>To also test that the contents appear in the given order, make a call to {@code inOrder()}
* on the object returned by this method.
*/
@CanIgnoreReturnValue
public Ordered containsExactlyElementsIn(@Nullable Iterable<? extends E> expected) {
if (expected == null) {
failWithoutActual(
simpleFact("could not perform containment check because expected iterable was null"),
actualContents());
return ALREADY_FAILED;
} else if (actual == null) {
failWithActual("expected an iterable that contains exactly", expected);
return ALREADY_FAILED;
}
List<A> actualList = iterableToList(castActual(actual));
List<? extends E> expectedList = iterableToList(expected);
if (expectedList.isEmpty()) {
if (actualList.isEmpty()) {
return IN_ORDER;
} else {
subject.isEmpty(); // fails
return ALREADY_FAILED;
}
}
// Check if the elements correspond in order. This allows the common case of a passing test
// using inOrder() to complete in linear time.
if (correspondInOrderExactly(actualList.iterator(), expectedList.iterator())) {
return IN_ORDER;
}
// We know they don't correspond in order, so we're going to have to do an any-order test.
// Find a many:many mapping between the indexes of the elements which correspond, and check
// it for completeness.
// Exceptions from Correspondence.compare are stored and treated as if false was returned.
Correspondence.ExceptionStore exceptions = Correspondence.ExceptionStore.forIterable();
ImmutableSetMultimap<Integer, Integer> candidateMapping =
findCandidateMapping(actualList, expectedList, exceptions);
if (failIfCandidateMappingHasMissingOrExtra(
actualList, expectedList, candidateMapping, exceptions)) {
return ALREADY_FAILED;
}
// We know that every expected element maps to at least one actual element, and vice versa.
// Find a maximal 1:1 mapping, and check it for completeness.
ImmutableBiMap<Integer, Integer> maximalOneToOneMapping =
findMaximalOneToOneMapping(candidateMapping);
if (failIfOneToOneMappingHasMissingOrExtra(
actualList, expectedList, maximalOneToOneMapping, exceptions)) {
return ALREADY_FAILED;
}
// Check whether we caught any exceptions from Correspondence.compare. We do the any-order
// assertions treating exceptions as if false was returned before this, because the failure
// messages are normally more useful (e.g. reporting that the actual iterable contained an
// unexpected null) but we are contractually obliged to throw here if the assertions passed.
if (exceptions.hasCompareException()) {
failWithoutActual(
factsBuilder()
.addAll(exceptions.describeAsMainCause())
.add(fact("expected", expected))
.addAll(correspondence.describeForIterable())
.add(simpleFact("found all expected elements (but failing because of exception)"))
.add(fullContents())
.build());
return ALREADY_FAILED;
}
// The 1:1 mapping is complete, so the test succeeds (but we know from above that the mapping
// is not in order).
return () ->
failWithActual(
factsBuilder()
.add(simpleFact("contents match, but order was wrong"))
.add(fact("expected", expected))
.addAll(correspondence.describeForIterable())
.build());
}
/**
 * Checks that actual iterable contains exactly elements that correspond to the expected
 * elements, i.e. that there is a 1:1 mapping between the actual elements and the expected
 * elements where each pair of elements correspond.
 *
 * <p>To also test that the contents appear in the given order, make a call to {@code inOrder()}
 * on the object returned by this method.
 */
@CanIgnoreReturnValue
@SuppressWarnings("AvoidObjectArrays")
public Ordered containsExactlyElementsIn(E @Nullable [] expected) {
  // Delegate to the Iterable overload unless the caller handed us a null array, which we
  // report as a usage error rather than throwing.
  if (expected != null) {
    return containsExactlyElementsIn(asList(expected));
  }
  failWithoutActual(
      simpleFact("could not perform containment check because expected array was null"),
      actualContents());
  return ALREADY_FAILED;
}
/**
 * Returns whether the actual and expected iterators have the same number of elements and, when
 * iterated pairwise, every pair of actual and expected values satisfies the correspondence.
 * Returns false if any comparison threw an exception.
 */
private boolean correspondInOrderExactly(
    Iterator<? extends A> actual, Iterator<? extends E> expected) {
  Correspondence.ExceptionStore exceptions = Correspondence.ExceptionStore.forIterable();
  while (actual.hasNext() && expected.hasNext()) {
    // A non-corresponding pair, or an exception thrown by the correspondence, makes us bail
    // out here; the caller then falls back on the any-order assertion.
    if (!correspondence.safeCompare(actual.next(), expected.next(), exceptions)) {
      return false;
    }
  }
  // Same length iff both iterators are now exhausted. Any exception already returned false
  // above, so the ExceptionStore needs no further inspection.
  return !actual.hasNext() && !expected.hasNext();
}
/**
 * Given a list of actual elements and a list of expected elements, finds a many:many mapping
 * between actual and expected elements where a pair of elements maps if it satisfies the
 * correspondence. Returns this mapping as a multimap where the keys are indexes into the actual
 * list and the values are indexes into the expected list. Any exceptions are treated as if the
 * elements did not correspond, and the exception added to the store.
 */
private ImmutableSetMultimap<Integer, Integer> findCandidateMapping(
    List<? extends A> actual,
    List<? extends E> expected,
    Correspondence.ExceptionStore exceptions) {
  ImmutableSetMultimap.Builder<Integer, Integer> edges = ImmutableSetMultimap.builder();
  // Compare every (actual, expected) index pair and record an edge for each correspondence.
  for (int a = 0; a < actual.size(); a++) {
    for (int e = 0; e < expected.size(); e++) {
      if (correspondence.safeCompare(actual.get(a), expected.get(e), exceptions)) {
        edges.put(a, e);
      }
    }
  }
  return edges.build();
}
/**
 * Given a list of actual elements, a list of expected elements, and a many:many mapping between
 * actual and expected elements specified as a multimap of indexes into the actual list to
 * indexes into the expected list, checks that every actual element maps to at least one
 * expected element and vice versa, and fails if this is not the case. Returns whether the
 * assertion failed.
 */
private boolean failIfCandidateMappingHasMissingOrExtra(
    List<? extends A> actual,
    List<? extends E> expected,
    ImmutableSetMultimap<Integer, Integer> mapping,
    Correspondence.ExceptionStore exceptions) {
  // Actual elements with no outgoing edge are "extra"; expected elements with no incoming
  // edge (i.e. absent from the inverse mapping's key set) are "missing".
  List<? extends A> extra = findNotIndexed(actual, mapping.keySet());
  List<? extends E> missing = findNotIndexed(expected, mapping.inverse().keySet());
  if (!missing.isEmpty() || !extra.isEmpty()) {
    failWithoutActual(
        factsBuilder()
            .addAll(describeMissingOrExtra(missing, extra, exceptions))
            .add(fact("expected", expected))
            .addAll(correspondence.describeForIterable())
            .add(butWas())
            .addAll(exceptions.describeAsAdditionalInfo())
            .build());
    return true;
  }
  return false;
}
/**
 * Given a list of missing elements and a list of extra elements, at least one of which must be
 * non-empty, returns facts describing them. Exceptions from calling {@link
 * Correspondence#formatDiff} are stored in {@code exceptions}.
 */
private ImmutableList<Fact> describeMissingOrExtra(
    List<? extends E> missing,
    List<? extends A> extra,
    Correspondence.ExceptionStore exceptions) {
  if (pairer != null) {
    // A key function was supplied: try to pair missing and extra elements by key so the
    // failure message can show likely-intended matches side by side.
    Pairing<A, E> pairing = pairer.pair(missing, extra, exceptions);
    if (pairing != null) {
      return describeMissingOrExtraWithPairing(pairing, exceptions);
    } else {
      // Pairing was rejected (the key function did not uniquely key the expected elements):
      // fall back to the unpaired rendering and tell the user why the keys were ignored.
      return factsBuilder()
          .addAll(describeMissingOrExtraWithoutPairing(missing, extra))
          .add(
              simpleFact(
                  "a key function which does not uniquely key the expected elements was"
                      + " provided and has consequently been ignored"))
          .build();
    }
  } else if (missing.size() == 1 && !extra.isEmpty()) {
    // Special case: exactly one missing element. Diff every extra element against it, since
    // one of them is probably the element the user intended.
    return factsBuilder()
        .add(fact("missing (1)", missing.get(0)))
        .addAll(formatExtras("unexpected", missing.get(0), extra, exceptions))
        .add(simpleFact("---"))
        .build();
  } else {
    return describeMissingOrExtraWithoutPairing(missing, extra);
  }
}
/** Renders the missing and extra elements plainly, without any key-based pairing. */
private ImmutableList<Fact> describeMissingOrExtraWithoutPairing(
    List<? extends E> missing, List<? extends A> extra) {
  return makeElementFactsForBoth("missing", missing, "unexpected", extra);
}
/**
 * Renders the missing and extra elements using the given key-based pairing: for each shared
 * key, shows the missing expected element next to the extra actual elements with that key
 * (diffed where possible); any elements whose keys did not pair are then listed plainly.
 */
private ImmutableList<Fact> describeMissingOrExtraWithPairing(
    Pairing<A, E> pairing, Correspondence.ExceptionStore exceptions) {
  ImmutableList.Builder<Fact> facts = factsBuilder();
  for (Object key : pairing.pairedKeysToExpectedValues.keySet()) {
    E missing = pairing.pairedKeysToExpectedValues.get(key);
    List<A> extras = pairing.pairedKeysToActualValues.get(key);
    facts.add(fact("for key", key));
    facts.add(fact("missing", missing));
    // Diff each actual element sharing this key against the missing expected element.
    facts.addAll(formatExtras("unexpected", missing, extras, exceptions));
    facts.add(simpleFact("---"));
  }
  if (!pairing.unpairedActualValues.isEmpty() || !pairing.unpairedExpectedValues.isEmpty()) {
    facts.add(simpleFact("elements without matching keys:"));
    facts.addAll(
        describeMissingOrExtraWithoutPairing(
            pairing.unpairedExpectedValues, pairing.unpairedActualValues));
  }
  return facts.build();
}
/**
 * Renders the extra elements under the given label. When {@code safeFormatDiff} produces a
 * diff between any extra element and the missing element, each extra is listed on its own
 * numbered line together with its diff; otherwise a single compact fact (with duplicate
 * counts) is emitted. Exceptions from formatting diffs are recorded in {@code exceptions}.
 */
private ImmutableList<Fact> formatExtras(
    String label,
    E missing,
    List<? extends A> extras,
    Correspondence.ExceptionStore exceptions) {
  // First pass: attempt a diff for every extra element, in list order, so any exceptions are
  // recorded deterministically.
  List<@Nullable String> diffs = new ArrayList<>(extras.size());
  boolean anyDiff = false;
  for (A extra : extras) {
    String diff = correspondence.safeFormatDiff(extra, missing, exceptions);
    diffs.add(diff);
    anyDiff |= (diff != null);
  }
  if (!anyDiff) {
    // No diffs at all: fall back to the compact single-fact rendering.
    return ImmutableList.of(
        fact(lenientFormat("%s (%s)", label, extras.size()), countDuplicates(extras)));
  }
  // Second pass: one numbered fact per extra element, plus its diff where we have one.
  ImmutableList.Builder<Fact> facts = factsBuilder();
  facts.add(simpleFact(lenientFormat("%s (%s)", label, extras.size())));
  for (int i = 0; i < extras.size(); i++) {
    facts.add(fact(lenientFormat("#%s", i + 1), extras.get(i)));
    if (diffs.get(i) != null) {
      facts.add(fact("diff", diffs.get(i)));
    }
  }
  return facts.build();
}
/**
 * Returns all the elements of the given list other than those with the given indexes. Assumes
 * that all the given indexes really are valid indexes into the list.
 */
private static <T extends @Nullable Object> List<T> findNotIndexed(
    List<T> list, Set<Integer> indexes) {
  // Fast path: when there are as many distinct valid indexes as elements, every index must be
  // present exactly once, so nothing is left over.
  if (list.size() == indexes.size()) {
    return asList();
  }
  List<T> leftovers = new ArrayList<>();
  int index = 0;
  for (T element : list) {
    if (!indexes.contains(index++)) {
      leftovers.add(element);
    }
  }
  return leftovers;
}
/**
 * Given a many:many mapping between actual elements and expected elements, finds a 1:1 mapping
 * which is the subset of that many:many mapping which includes the largest possible number of
 * elements. The input and output mappings are each described as a map or multimap where the
 * keys are indexes into the actual list and the values are indexes into the expected list. If
 * there are multiple possible output mappings tying for the largest possible, this returns an
 * arbitrary one.
 *
 * @param edges the candidate many:many mapping, as a multimap of actual indexes to expected
 *     indexes
 */
private static ImmutableBiMap<Integer, Integer> findMaximalOneToOneMapping(
    ImmutableMultimap<Integer, Integer> edges) {
  /*
   * Finding this 1:1 mapping is analogous to finding a maximum cardinality bipartite matching
   * (https://en.wikipedia.org/wiki/Matching_(graph_theory)#In_unweighted_bipartite_graphs).
   * - The two sets of elements together correspond to the vertices of a graph.
   * - The many:many mapping corresponds to the edges of that graph.
   * - The graph is therefore bipartite, with the two sets of elements corresponding to the two
   *   parts.
   * - A 1:1 mapping corresponds to a matching on that bipartite graph (aka an independent edge
   *   set, i.e. a subset of the edges with no common vertices).
   * - And the 1:1 mapping which includes the largest possible number of elements corresponds
   *   to the maximum cardinality matching.
   *
   * So we'll apply a standard algorithm for doing maximum cardinality bipartite matching.
   */
  return GraphMatching.maximumCardinalityBipartiteMatching(edges);
}
/**
 * Given a list of actual elements, a list of expected elements, and a 1:1 mapping between
 * actual and expected elements specified as a bimap of indexes into the actual list to indexes
 * into the expected list, checks that every actual element maps to an expected element and vice
 * versa, and fails if this is not the case. Returns whether the assertion failed.
 */
private boolean failIfOneToOneMappingHasMissingOrExtra(
    List<? extends A> actual,
    List<? extends E> expected,
    BiMap<Integer, Integer> mapping,
    Correspondence.ExceptionStore exceptions) {
  // Elements absent from the 1:1 mapping are the unmatched ones we must report.
  List<? extends A> extra = findNotIndexed(actual, mapping.keySet());
  List<? extends E> missing = findNotIndexed(expected, mapping.values());
  if (!missing.isEmpty() || !extra.isEmpty()) {
    failWithoutActual(
        factsBuilder()
            .add(
                // NOTE(review): "as least" looks like a typo for "at least" in this
                // user-facing message — confirm before changing, since tests elsewhere may
                // pin the exact text.
                simpleFact(
                    "in an assertion requiring a 1:1 mapping between the expected and the"
                        + " actual elements, each actual element matches as least one expected"
                        + " element, and vice versa, but there was no 1:1 mapping"))
            .add(
                simpleFact(
                    "using the most complete 1:1 mapping (or one such mapping, if there is a"
                        + " tie)"))
            .addAll(describeMissingOrExtra(missing, extra, exceptions))
            .add(fact("expected", expected))
            .addAll(correspondence.describeForIterable())
            .add(butWas())
            .addAll(exceptions.describeAsAdditionalInfo())
            .build());
    return true;
  }
  return false;
}
/**
 * Checks that the actual iterable contains elements that correspond to all the expected
 * elements, i.e. that there is a 1:1 mapping between any subset of the actual elements and the
 * expected elements where each pair of elements correspond.
 *
 * <p>To also test that the contents appear in the given order, make a call to {@code inOrder()}
 * on the object returned by this method. The elements must appear in the given order within the
 * actual iterable, but they are not required to be consecutive.
 */
@SafeVarargs
@CanIgnoreReturnValue
public final Ordered containsAtLeast(E first, E second, E @Nullable ... rest) {
  // Collect the varargs into a single iterable and delegate to the Iterable overload.
  Iterable<E> allExpected = accumulate(first, second, rest);
  return containsAtLeastElementsIn(allExpected);
}
/**
 * Checks that the actual iterable contains elements that correspond to all the expected
 * elements, i.e. that there is a 1:1 mapping between any subset of the actual elements and the
 * expected elements where each pair of elements correspond.
 *
 * <p>To also test that the contents appear in the given order, make a call to {@code inOrder()}
 * on the object returned by this method. The elements must appear in the given order within the
 * actual iterable, but they are not required to be consecutive.
 */
@CanIgnoreReturnValue
public Ordered containsAtLeastElementsIn(Iterable<? extends E> expected) {
  // A null actual iterable cannot contain anything: fail immediately.
  if (actual == null) {
    failWithActual(
        factsBuilder()
            .add(fact("expected an iterable that contains at least", expected))
            .addAll(correspondence.describeForIterable())
            .build());
    return ALREADY_FAILED;
  }
  List<A> actualList = iterableToList(castActual(actual));
  List<? extends E> expectedList = iterableToList(expected);
  // Check if the expected elements correspond in order to any subset of the actual elements.
  // This allows the common case of a passing test using inOrder() to complete in linear time.
  if (correspondInOrderAllIn(actualList.iterator(), expectedList.iterator())) {
    return IN_ORDER;
  }
  // We know they don't correspond in order, so we're going to have to do an any-order test.
  // Find a many:many mapping between the indexes of the elements which correspond, and check
  // it for completeness.
  Correspondence.ExceptionStore exceptions = Correspondence.ExceptionStore.forIterable();
  ImmutableSetMultimap<Integer, Integer> candidateMapping =
      findCandidateMapping(actualList, expectedList, exceptions);
  if (failIfCandidateMappingHasMissing(
      actualList, expectedList, candidateMapping, exceptions)) {
    return ALREADY_FAILED;
  }
  // We know that every expected element maps to at least one actual element. (Unmatched
  // actual elements are fine for containsAtLeast, so unlike containsExactly we only check one
  // direction.) Find a maximal 1:1 mapping, and check it for completeness.
  ImmutableBiMap<Integer, Integer> maximalOneToOneMapping =
      findMaximalOneToOneMapping(candidateMapping);
  if (failIfOneToOneMappingHasMissing(
      actualList, expectedList, maximalOneToOneMapping, exceptions)) {
    return ALREADY_FAILED;
  }
  // Check whether we caught any exceptions from Correspondence.compare. As with
  // containsExactlyElementsIn, we do the any-order assertions treating exceptions as if false
  // was returned before this, but we are contractually obliged to throw here if the assertions
  // passed.
  if (exceptions.hasCompareException()) {
    failWithoutActual(
        factsBuilder()
            .addAll(exceptions.describeAsMainCause())
            .add(fact("expected to contain at least", expected))
            .addAll(correspondence.describeForIterable())
            .add(simpleFact("found all expected elements (but failing because of exception)"))
            .add(fullContents())
            .build());
    return ALREADY_FAILED;
  }
  // The 1:1 mapping maps all the expected elements, so the test succeeds (but we know from
  // above that the mapping is not in order).
  return () ->
      failWithActual(
          factsBuilder()
              .add(simpleFact("required elements were all found, but order was wrong"))
              .add(fact("expected order for required elements", expected))
              .addAll(correspondence.describeForIterable())
              .build());
}
/**
 * Checks that the actual iterable contains elements that correspond to all the expected
 * elements, i.e. that there is a 1:1 mapping between any subset of the actual elements and the
 * expected elements where each pair of elements correspond.
 *
 * <p>To also test that the contents appear in the given order, make a call to {@code inOrder()}
 * on the object returned by this method. The elements must appear in the given order within the
 * actual iterable, but they are not required to be consecutive.
 */
@CanIgnoreReturnValue
@SuppressWarnings("AvoidObjectArrays")
public Ordered containsAtLeastElementsIn(E @Nullable [] expected) {
  // Delegate to the Iterable overload unless the caller handed us a null array, which we
  // report as a usage error rather than throwing.
  if (expected != null) {
    return containsAtLeastElementsIn(asList(expected));
  }
  failWithoutActual(
      simpleFact("could not perform containment check because expected array was null"),
      actualContents());
  return ALREADY_FAILED;
}
/**
 * Returns whether all the elements of the expected iterator and any subset of the elements of
 * the actual iterator can be paired up in order, such that every pair of actual and expected
 * elements satisfies the correspondence. Returns false if any comparison threw an exception.
 */
private boolean correspondInOrderAllIn(
    Iterator<? extends A> actual, Iterator<? extends E> expected) {
  // We take a greedy approach here, iterating through the expected elements and pairing each
  // with the first applicable actual element. This is fine for the in-order test, since there's
  // no way that pairing an expected element with a later actual element permits a solution
  // which couldn't be achieved by pairing it with the first. (For the any-order test, we may
  // want to pair an expected element with a later actual element so that we can pair the
  // earlier actual element with a later expected element, but that doesn't apply here.)
  Correspondence.ExceptionStore exceptions = Correspondence.ExceptionStore.forIterable();
  while (expected.hasNext()) {
    E expectedElement = expected.next();
    // Return false if we couldn't find the expected element, or if the correspondence threw
    // an exception. We'll fall back on the any-order assertion in this case.
    if (!findCorresponding(actual, expectedElement, exceptions)
        || exceptions.hasCompareException()) {
      return false;
    }
  }
  return true;
}
/**
 * Advances the actual iterator looking for an element which corresponds to the expected
 * element, consuming elements as it goes. Returns whether or not it finds one.
 */
private boolean findCorresponding(
    Iterator<? extends A> actual, E expectedElement, Correspondence.ExceptionStore exceptions) {
  while (actual.hasNext()) {
    A candidate = actual.next();
    // Stop at the first actual element satisfying the correspondence.
    if (correspondence.safeCompare(candidate, expectedElement, exceptions)) {
      return true;
    }
  }
  // The iterator is exhausted: no corresponding element exists.
  return false;
}
/**
 * Given a list of actual elements, a list of expected elements, and a many:many mapping between
 * actual and expected elements specified as a multimap of indexes into the actual list to
 * indexes into the expected list, checks that every expected element maps to at least one
 * actual element, and fails if this is not the case. Actual elements which do not map to any
 * expected elements are ignored. Returns whether the assertion failed.
 */
private boolean failIfCandidateMappingHasMissing(
    List<? extends A> actual,
    List<? extends E> expected,
    ImmutableSetMultimap<Integer, Integer> mapping,
    Correspondence.ExceptionStore exceptions) {
  // Expected elements with no incoming edge in the mapping are missing.
  List<? extends E> missing = findNotIndexed(expected, mapping.inverse().keySet());
  if (!missing.isEmpty()) {
    // Unmatched actual elements are only computed here, for the failure message, so that
    // describeMissing can diff the missing elements against them.
    List<? extends A> extra = findNotIndexed(actual, mapping.keySet());
    failWithoutActual(
        factsBuilder()
            .addAll(describeMissing(missing, extra, exceptions))
            .add(fact("expected to contain at least", expected))
            .addAll(correspondence.describeForIterable())
            .add(butWas())
            .addAll(exceptions.describeAsAdditionalInfo())
            .build());
    return true;
  }
  return false;
}
/**
 * Given a list of missing elements, which must be non-empty, and a list of extra elements,
 * returns a list of facts describing the missing elements, diffing against the extra ones where
 * appropriate. Exceptions from calling {@link Correspondence#formatDiff} are stored in {@code
 * exceptions}.
 */
private ImmutableList<Fact> describeMissing(
    List<? extends E> missing,
    List<? extends A> extra,
    Correspondence.ExceptionStore exceptions) {
  if (pairer != null) {
    // A key function was supplied: try to pair missing and extra elements by key so the
    // failure message can show likely-intended matches side by side.
    Pairing<A, E> pairing = pairer.pair(missing, extra, exceptions);
    if (pairing != null) {
      return describeMissingWithPairing(pairing, exceptions);
    } else {
      // Pairing was rejected (the key function did not uniquely key the expected elements):
      // fall back to the unpaired rendering and tell the user why the keys were ignored.
      return factsBuilder()
          .addAll(describeMissingWithoutPairing(missing))
          .add(
              simpleFact(
                  "a key function which does not uniquely key the expected elements was"
                      + " provided and has consequently been ignored"))
          .build();
    }
  } else {
    // N.B. For containsAny, we do not treat having exactly one missing element as a special
    // case (as we do for containsExactly). Showing extra elements has lower utility for
    // containsAny (because they are allowed by the assertion) so we only show them if the user
    // has explicitly opted in by specifying a pairing.
    return describeMissingWithoutPairing(missing);
  }
}
/** Renders the missing elements plainly, with no extra elements to diff against. */
private ImmutableList<Fact> describeMissingWithoutPairing(List<? extends E> missing) {
  return makeElementFactsForBoth("missing", missing, "unexpected", ImmutableList.of());
}
/**
 * Renders the missing elements using the given key-based pairing: for each shared key, shows
 * the missing expected element next to the actual elements with that key (diffed where
 * possible); any expected elements whose keys did not pair are then listed plainly.
 */
private ImmutableList<Fact> describeMissingWithPairing(
    Pairing<A, E> pairing, Correspondence.ExceptionStore exceptions) {
  ImmutableList.Builder<Fact> facts = factsBuilder();
  for (Object key : pairing.pairedKeysToExpectedValues.keySet()) {
    E missing = pairing.pairedKeysToExpectedValues.get(key);
    List<A> extras = pairing.pairedKeysToActualValues.get(key);
    facts.add(fact("for key", key));
    facts.add(fact("missing", missing));
    // Diff each actual element sharing this key against the missing expected element.
    facts.addAll(
        formatExtras("did contain elements with that key", missing, extras, exceptions));
    facts.add(simpleFact("---"));
  }
  if (!pairing.unpairedExpectedValues.isEmpty()) {
    facts.add(simpleFact("elements without matching keys:"));
    facts.addAll(describeMissingWithoutPairing(pairing.unpairedExpectedValues));
  }
  return facts.build();
}
/**
 * Given a list of expected elements, and a 1:1 mapping between actual and expected elements
 * specified as a bimap of indexes into the actual list to indexes into the expected list,
 * checks that every expected element maps to an actual element. Actual elements which do not
 * map to any expected elements are ignored. Returns whether the assertion failed.
 */
private boolean failIfOneToOneMappingHasMissing(
    List<? extends A> actual,
    List<? extends E> expected,
    BiMap<Integer, Integer> mapping,
    Correspondence.ExceptionStore exceptions) {
  // Expected elements absent from the 1:1 mapping's value set are missing.
  List<? extends E> missing = findNotIndexed(expected, mapping.values());
  if (!missing.isEmpty()) {
    // Unmatched actual elements are only computed for the failure message (for diffing).
    List<? extends A> extra = findNotIndexed(actual, mapping.keySet());
    failWithoutActual(
        factsBuilder()
            .add(
                // NOTE(review): "as least" looks like a typo for "at least" in this
                // user-facing message — confirm before changing, since tests elsewhere may
                // pin the exact text.
                simpleFact(
                    "in an assertion requiring a 1:1 mapping between the expected and a subset"
                        + " of the actual elements, each actual element matches as least one"
                        + " expected element, and vice versa, but there was no 1:1 mapping"))
            .add(
                simpleFact(
                    "using the most complete 1:1 mapping (or one such mapping, if there is a"
                        + " tie)"))
            .addAll(describeMissing(missing, extra, exceptions))
            .add(fact("expected to contain at least", expected))
            .addAll(correspondence.describeForIterable())
            .add(butWas())
            .addAll(exceptions.describeAsAdditionalInfo())
            .build());
    return true;
  }
  return false;
}
/**
 * Checks that the actual iterable contains at least one element that corresponds to at least
 * one of the expected elements.
 */
@SafeVarargs
public final void containsAnyOf(E first, E second, E @Nullable ... rest) {
  // Collect the varargs into a single iterable and delegate to the Iterable overload.
  Iterable<E> allExpected = accumulate(first, second, rest);
  containsAnyIn(allExpected);
}
/**
 * Checks that the actual iterable contains at least one element that corresponds to at least
 * one of the expected elements.
 */
public void containsAnyIn(Iterable<? extends E> expected) {
  // A null actual iterable cannot contain anything: fail immediately.
  if (actual == null) {
    failWithActual(
        factsBuilder()
            .add(fact("expected an iterable that contains any of", expected))
            .addAll(correspondence.describeForIterable())
            .build());
    return;
  }
  Collection<A> actual = iterableToCollection(castActual(this.actual));
  Correspondence.ExceptionStore exceptions = Correspondence.ExceptionStore.forIterable();
  // Scan every (expected, actual) pair, stopping at the first correspondence.
  for (E expectedItem : expected) {
    for (A actualItem : actual) {
      if (correspondence.safeCompare(actualItem, expectedItem, exceptions)) {
        // Found a match, but we still need to fail if we hit an exception along the way.
        if (exceptions.hasCompareException()) {
          failWithoutActual(
              factsBuilder()
                  .addAll(exceptions.describeAsMainCause())
                  .add(fact("expected to contain any of", expected))
                  .addAll(correspondence.describeForIterable())
                  .add(simpleFact("found match (but failing because of exception)"))
                  .add(fullContents())
                  .build());
        }
        return;
      }
    }
  }
  // Found no match. Fail, reporting elements that have a correct key if there are any.
  if (pairer != null) {
    Pairing<A, E> pairing =
        pairer.pair(iterableToList(expected), iterableToList(actual), exceptions);
    if (pairing != null) {
      if (!pairing.pairedKeysToExpectedValues.isEmpty()) {
        // Some actual elements share a key with an expected element: show them, with diffs.
        failWithoutActual(
            factsBuilder()
                .add(fact("expected to contain any of", expected))
                .addAll(correspondence.describeForIterable())
                .add(butWas())
                .addAll(describeAnyMatchesByKey(pairing, exceptions))
                .addAll(exceptions.describeAsAdditionalInfo())
                .build());
      } else {
        // No actual element even shares a key with any expected element.
        failWithoutActual(
            factsBuilder()
                .add(fact("expected to contain any of", expected))
                .addAll(correspondence.describeForIterable())
                .add(butWas())
                .add(simpleFact("it does not contain any matches by key, either"))
                .addAll(exceptions.describeAsAdditionalInfo())
                .build());
      }
    } else {
      // Pairing was rejected because the key function did not uniquely key the expected
      // elements; report a plain failure and explain why the keys were ignored.
      failWithoutActual(
          factsBuilder()
              .add(fact("expected to contain any of", expected))
              .addAll(correspondence.describeForIterable())
              .add(butWas())
              .add(
                  simpleFact(
                      "a key function which does not uniquely key the expected elements was"
                          + " provided and has consequently been ignored"))
              .addAll(exceptions.describeAsAdditionalInfo())
              .build());
    }
  } else {
    // No key function supplied: plain failure.
    failWithoutActual(
        factsBuilder()
            .add(fact("expected to contain any of", expected))
            .addAll(correspondence.describeForIterable())
            .add(butWas())
            .addAll(exceptions.describeAsAdditionalInfo())
            .build());
  }
}
/**
 * Checks that the actual iterable contains at least one element that corresponds to at least
 * one of the expected elements.
 */
@SuppressWarnings("AvoidObjectArrays")
public void containsAnyIn(E @Nullable [] expected) {
  // Delegate to the Iterable overload unless the caller handed us a null array, which we
  // report as a usage error rather than throwing.
  if (expected != null) {
    containsAnyIn(asList(expected));
    return;
  }
  failWithoutActual(
      simpleFact("could not perform containment check because expected array was null"),
      actualContents());
}
/**
 * Renders, for each key shared between an expected and one or more actual elements, the
 * expected element alongside the actual elements with that key (diffed where possible).
 */
private ImmutableList<Fact> describeAnyMatchesByKey(
    Pairing<A, E> pairing, Correspondence.ExceptionStore exceptions) {
  ImmutableList.Builder<Fact> facts = factsBuilder();
  for (Object key : pairing.pairedKeysToExpectedValues.keySet()) {
    E expected = pairing.pairedKeysToExpectedValues.get(key);
    List<A> got = pairing.pairedKeysToActualValues.get(key);
    facts.add(fact("for key", key));
    facts.add(fact("expected any of", expected));
    facts.addAll(formatExtras("but got", expected, got, exceptions));
    facts.add(simpleFact("---"));
  }
  return facts.build();
}
/**
 * Checks that the actual iterable contains no elements that correspond to any of the given
 * elements.
 */
@SafeVarargs
public final void containsNoneOf(E first, E second, E @Nullable ... rest) {
  // Collect the varargs into a single iterable and delegate to the Iterable overload.
  Iterable<E> allExcluded = accumulate(first, second, rest);
  containsNoneIn(allExcluded);
}
/**
 * Checks that the actual iterable contains no elements that correspond to any of the given
 * elements.
 */
@SuppressWarnings("nullness") // TODO: b/423853632 - Remove after checker is fixed.
public void containsNoneIn(Iterable<? extends E> excluded) {
  // A null actual iterable trivially contains nothing, but we still treat it as a failure.
  if (actual == null) {
    failWithActual(
        factsBuilder()
            .add(fact("expected an iterable that does not contain any of", excluded))
            .addAll(correspondence.describeForIterable())
            .build());
    return;
  }
  Collection<A> actual = iterableToCollection(castActual(this.actual));
  // Multimap of each excluded element to every actual element that corresponds to it.
  ListMultimap<E, A> present = LinkedListMultimap.create();
  Correspondence.ExceptionStore exceptions = Correspondence.ExceptionStore.forIterable();
  // Deduplicate the excluded elements (preserving encounter order via the linked set) so each
  // distinct exclusion is reported once.
  for (E excludedItem : Sets.newLinkedHashSet(excluded)) {
    for (A actualItem : actual) {
      if (correspondence.safeCompare(actualItem, excludedItem, exceptions)) {
        present.put(excludedItem, actualItem);
      }
    }
  }
  // Fail if we found any matches.
  if (!present.isEmpty()) {
    ImmutableList.Builder<Fact> facts = factsBuilder();
    facts.add(fact("expected not to contain any of", annotateEmptyStrings(excluded)));
    facts.addAll(correspondence.describeForIterable());
    // One "but contained / corresponding to" section per excluded element that matched.
    for (E excludedItem : present.keySet()) {
      List<A> actualItems = present.get(excludedItem);
      facts.add(fact("but contained", annotateEmptyStrings(actualItems)));
      facts.add(fact("corresponding to", excludedItem));
      facts.add(simpleFact("---"));
    }
    facts.add(fullContents());
    facts.addAll(exceptions.describeAsAdditionalInfo());
    failWithoutActual(facts.build());
    return;
  }
  // Found no match, but we still need to fail if we hit an exception along the way.
  if (exceptions.hasCompareException()) {
    failWithoutActual(
        factsBuilder()
            .addAll(exceptions.describeAsMainCause())
            .add(fact("expected not to contain any of", annotateEmptyStrings(excluded)))
            .addAll(correspondence.describeForIterable())
            .add(simpleFact("found no matches (but failing because of exception)"))
            .add(fullContents())
            .build());
  }
}
/**
 * Checks that the subject contains no elements that correspond to any of the given elements.
 */
@SuppressWarnings("AvoidObjectArrays")
public void containsNoneIn(E @Nullable [] excluded) {
  // Delegate to the Iterable overload unless the caller handed us a null array, which we
  // report as a usage error rather than throwing.
  if (excluded != null) {
    containsNoneIn(asList(excluded));
    return;
  }
  failWithoutActual(
      simpleFact("could not perform containment check because excluded array was null"),
      actualContents());
}
/**
 * Casts the actual iterable to the element type {@code A} compared by the correspondence. The
 * cast is unchecked: if an element is not actually an {@code A}, a ClassCastException will
 * surface when it is used, which is the correct behaviour.
 */
@SuppressWarnings("unchecked") // throwing ClassCastException is the correct behaviour
private Iterable<A> castActual(Iterable<?> actual) {
  return (Iterable<A>) actual;
}
// TODO(b/69154276): Consider commoning up some of the logic between IterableSubject.Pairer,
// MapSubject.MapDifference, and MultimapSubject.difference(). We are likely to need something
// similar again when we do the work to improve the failure messages from
// MultimapSubject.UsingCorrespondence (because it won't be able to delegate to
// IterableSubject.UsingCorrespondence like it does now). So it makes sense to do the
// refactoring as part of that. Right now, we don't even know what Multimap is going to need.
/**
* A
|
UsingCorrespondence
|
java
|
apache__kafka
|
streams/src/test/java/org/apache/kafka/streams/kstream/internals/KTableAggregateTest.java
|
{
"start": 19265,
"end": 23095
}
|
class ____ implements Serde<NoEqualsImpl> {
@Override
public Serializer<NoEqualsImpl> serializer() {
return (topic, data) -> data == null ? null : data.x.getBytes(StandardCharsets.UTF_8);
}
@Override
public Deserializer<NoEqualsImpl> deserializer() {
return (topic, data) -> data == null ? null : new NoEqualsImpl(new String(data, StandardCharsets.UTF_8));
}
}
// `NoEqualsImpl` doesn't implement `equals` but we can still compare two `NoEqualsImpl` instances by comparing their underlying `x` field
private List<TestRecord<String, Long>> toComparableList(final List<TestRecord<NoEqualsImpl, Long>> list) {
final List<TestRecord<String, Long>> comparableList = new ArrayList<>();
list.forEach(tr -> comparableList.add(new TestRecord<>(tr.key().getX(), tr.value(), Instant.ofEpochMilli(tr.timestamp()))));
return comparableList;
}
private void testKeyWithNoEquals(
final KeyValueMapper<NoEqualsImpl, NoEqualsImpl, KeyValue<NoEqualsImpl, NoEqualsImpl>> keyValueMapper,
final List<TestRecord<NoEqualsImpl, Long>> expected) {
final StreamsBuilder builder = new StreamsBuilder();
final String input = "input-topic";
final String output = "output-topic";
final Serde<NoEqualsImpl> noEqualsImplSerde = new NoEqualsImplSerde();
builder
.table(input, Consumed.with(noEqualsImplSerde, noEqualsImplSerde))
.groupBy(keyValueMapper, Grouped.with(noEqualsImplSerde, noEqualsImplSerde))
.count()
.toStream()
.to(output);
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), CONFIG, Instant.ofEpochMilli(0L))) {
final TestInputTopic<NoEqualsImpl, NoEqualsImpl> inputTopic =
driver.createInputTopic(input, noEqualsImplSerde.serializer(), noEqualsImplSerde.serializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
final TestOutputTopic<NoEqualsImpl, Long> outputTopic =
driver.createOutputTopic(output, noEqualsImplSerde.deserializer(), new LongDeserializer());
final NoEqualsImpl a = new NoEqualsImpl("1");
final NoEqualsImpl b = new NoEqualsImpl("1");
assertNotEquals(a, b);
assertNotSame(a, b);
inputTopic.pipeInput(a, a, 8);
inputTopic.pipeInput(b, b, 9);
final List<TestRecord<String, Long>> actualComparable = toComparableList(outputTopic.readRecordsToList());
final List<TestRecord<String, Long>> expectedComparable = toComparableList(expected);
assertEquals(expectedComparable, actualComparable);
}
}
@Test
public void testNoEqualsAndNotSameObject() {
testKeyWithNoEquals(
// key changes, different object reference (deserializer returns a new object reference)
(k, v) -> new KeyValue<>(v, v),
asList(
new TestRecord<>(new NoEqualsImpl("1"), 1L, Instant.ofEpochMilli(8)),
new TestRecord<>(new NoEqualsImpl("1"), 0L, Instant.ofEpochMilli(9)), // transient inconsistent state
new TestRecord<>(new NoEqualsImpl("1"), 1L, Instant.ofEpochMilli(9))
)
);
}
@Test
public void testNoEqualsAndSameObject() {
testKeyWithNoEquals(
// key does not change, same object reference
KeyValue::new,
asList(
new TestRecord<>(new NoEqualsImpl("1"), 1L, Instant.ofEpochMilli(8)),
new TestRecord<>(new NoEqualsImpl("1"), 1L, Instant.ofEpochMilli(9))
)
);
}
}
|
NoEqualsImplSerde
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/dialect/postgresql/ast/stmt/PGDropSchemaStatement.java
|
{
"start": 1012,
"end": 2488
}
|
class ____ extends SQLStatementImpl implements PGSQLStatement, SQLDropStatement {
private SQLName schemaName;
private List<SQLName> multipleNames = new ArrayList<>();
private boolean ifExists;
private boolean cascade;
private boolean restrict;
public SQLName getSchemaName() {
return this.schemaName;
}
public void setSchemaName(SQLName schemaName) {
this.schemaName = schemaName;
}
public List<SQLName> getMultipleNames() {
return this.multipleNames;
}
public void setMultipleNames(List<SQLName> multipleNames) {
this.multipleNames = multipleNames;
}
public boolean isIfExists() {
return ifExists;
}
public void setIfExists(boolean ifExists) {
this.ifExists = ifExists;
}
public boolean isCascade() {
return cascade;
}
public void setCascade(boolean cascade) {
this.cascade = cascade;
}
public boolean isRestrict() {
return restrict;
}
public void setRestrict(boolean restrict) {
this.restrict = restrict;
}
protected void accept0(SQLASTVisitor visitor) {
if (visitor instanceof PGASTVisitor) {
accept0((PGASTVisitor) visitor);
}
}
@Override
public void accept0(PGASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, this.schemaName);
}
visitor.endVisit(this);
}
}
|
PGDropSchemaStatement
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/config/Configurator.java
|
{
"start": 1515,
"end": 1685
}
|
class ____ several ways to construct a LoggerContext using
* the location of a configuration file, a context name, and various optional parameters.
*/
public final
|
provides
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/response/IbmWatsonxRankedResponseEntityTests.java
|
{
"start": 899,
"end": 6558
}
|
class ____ extends ESTestCase {
public void testResponseLiteral() throws IOException {
InferenceServiceResults parsedResults = IbmWatsonxRankedResponseEntity.fromResponse(
new HttpResult(mock(HttpResponse.class), responseLiteral.getBytes(StandardCharsets.UTF_8))
);
MatcherAssert.assertThat(parsedResults, instanceOf(RankedDocsResults.class));
List<RankedDocsResults.RankedDoc> expected = responseLiteralDocs();
for (int i = 0; i < ((RankedDocsResults) parsedResults).getRankedDocs().size(); i++) {
assertEquals(((RankedDocsResults) parsedResults).getRankedDocs().get(i).index(), expected.get(i).index());
}
}
public void testGeneratedResponse() throws IOException {
int numDocs = randomIntBetween(1, 10);
List<RankedDocsResults.RankedDoc> expected = new ArrayList<>(numDocs);
StringBuilder responseBuilder = new StringBuilder();
responseBuilder.append("{");
responseBuilder.append("\"results\": [");
List<Integer> indices = linear(numDocs);
List<Float> scores = linearFloats(numDocs);
for (int i = 0; i < numDocs; i++) {
int index = indices.remove(randomInt(indices.size() - 1));
responseBuilder.append("{");
responseBuilder.append("\"index\":").append(index).append(",");
responseBuilder.append("\"score\":").append(scores.get(i).toString()).append("}");
expected.add(new RankedDocsResults.RankedDoc(index, scores.get(i), null));
if (i < numDocs - 1) {
responseBuilder.append(",");
}
}
responseBuilder.append("]");
responseBuilder.append(randomIntBetween(1, 10)).append("}");
InferenceServiceResults parsedResults = IbmWatsonxRankedResponseEntity.fromResponse(
new HttpResult(mock(HttpResponse.class), responseBuilder.toString().getBytes(StandardCharsets.UTF_8))
);
MatcherAssert.assertThat(parsedResults, instanceOf(RankedDocsResults.class));
for (int i = 0; i < ((RankedDocsResults) parsedResults).getRankedDocs().size(); i++) {
assertEquals(((RankedDocsResults) parsedResults).getRankedDocs().get(i).index(), expected.get(i).index());
}
}
private ArrayList<RankedDocsResults.RankedDoc> responseLiteralDocs() {
var list = new ArrayList<RankedDocsResults.RankedDoc>();
list.add(new RankedDocsResults.RankedDoc(2, 0.98005307F, null));
list.add(new RankedDocsResults.RankedDoc(3, 0.27904198F, null));
list.add(new RankedDocsResults.RankedDoc(0, 0.10194652F, null));
return list;
}
private final String responseLiteral = """
{
"results": [
{
"index": 2,
"score": 0.98005307
},
{
"index": 3,
"score": 0.27904198
},
{
"index": 0,
"score": 0.10194652
}
]
}
""";
public void testResponseLiteralWithDocuments() throws IOException {
InferenceServiceResults parsedResults = IbmWatsonxRankedResponseEntity.fromResponse(
new HttpResult(mock(HttpResponse.class), responseLiteralWithDocuments.getBytes(StandardCharsets.UTF_8))
);
MatcherAssert.assertThat(parsedResults, instanceOf(RankedDocsResults.class));
MatcherAssert.assertThat(((RankedDocsResults) parsedResults).getRankedDocs(), is(responseLiteralDocsWithText));
}
private final String responseLiteralWithDocuments = """
{
"results": [
{
"input": {
"text": "Washington, D.C.."
},
"index": 2,
"score": 0.98005307
},
{
"input": {
"text": "Capital punishment has existed in the United States since before the United States was a country. "
},
"index": 3,
"score": 0.27904198
},
{
"input": {
"text": "Carson City is the capital city of the American state of Nevada."
},
"index": 0,
"score": 0.10194652
}
]
}
""";
private final List<RankedDocsResults.RankedDoc> responseLiteralDocsWithText = List.of(
new RankedDocsResults.RankedDoc(2, 0.98005307F, "Washington, D.C.."),
new RankedDocsResults.RankedDoc(
3,
0.27904198F,
"Capital punishment has existed in the United States since before the United States was a country. "
),
new RankedDocsResults.RankedDoc(0, 0.10194652F, "Carson City is the capital city of the American state of Nevada.")
);
private ArrayList<Integer> linear(int n) {
ArrayList<Integer> list = new ArrayList<>();
for (int i = 0; i <= n; i++) {
list.add(i);
}
return list;
}
// creates a list of doubles of monotonically decreasing magnitude
private ArrayList<Float> linearFloats(int n) {
ArrayList<Float> list = new ArrayList<>();
float startValue = 1.0f;
float decrement = startValue / n + 1;
for (int i = 0; i <= n; i++) {
list.add(startValue - (i * decrement));
}
return list;
}
}
|
IbmWatsonxRankedResponseEntityTests
|
java
|
google__dagger
|
javatests/artifacts/dagger/build-tests/src/test/java/buildtests/TransitiveProvidesScopeTest.java
|
{
"start": 5622,
"end": 5976
}
|
interface ____ {",
" String string();",
"}")
.addSrcFile(
"MyModule.java",
"package library1;",
"",
"import dagger.Module;",
"import dagger.Provides;",
"import library2.MyScope;",
"",
"@Module",
"public
|
MySubcomponent
|
java
|
elastic__elasticsearch
|
x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/action/MountSearchableSnapshotRequestTests.java
|
{
"start": 1233,
"end": 10765
}
|
class ____ extends AbstractWireSerializingTestCase<MountSearchableSnapshotRequest> {
private MountSearchableSnapshotRequest randomState(MountSearchableSnapshotRequest instance) {
return new MountSearchableSnapshotRequest(
TEST_REQUEST_TIMEOUT,
randomBoolean() ? instance.mountedIndexName() : mutateString(instance.mountedIndexName()),
randomBoolean() ? instance.repositoryName() : mutateString(instance.repositoryName()),
randomBoolean() ? instance.snapshotName() : mutateString(instance.snapshotName()),
randomBoolean() ? instance.snapshotIndexName() : mutateString(instance.snapshotIndexName()),
randomBoolean() ? instance.indexSettings() : mutateSettings(instance.indexSettings()),
randomBoolean() ? instance.ignoreIndexSettings() : mutateStringArray(instance.ignoreIndexSettings()),
randomBoolean(),
randomFrom(MountSearchableSnapshotRequest.Storage.values())
).masterNodeTimeout(randomBoolean() ? instance.masterNodeTimeout() : mutateTimeValue(instance.masterNodeTimeout()));
}
@Override
protected MountSearchableSnapshotRequest createTestInstance() {
return randomState(
new MountSearchableSnapshotRequest(
TEST_REQUEST_TIMEOUT,
randomAlphaOfLength(5),
randomAlphaOfLength(5),
randomAlphaOfLength(5),
randomAlphaOfLength(5),
Settings.EMPTY,
Strings.EMPTY_ARRAY,
randomBoolean(),
randomFrom(MountSearchableSnapshotRequest.Storage.values())
)
);
}
@Override
protected Writeable.Reader<MountSearchableSnapshotRequest> instanceReader() {
return MountSearchableSnapshotRequest::new;
}
@Override
protected MountSearchableSnapshotRequest mutateInstance(MountSearchableSnapshotRequest req) {
return switch (randomInt(8)) {
case 0 -> new MountSearchableSnapshotRequest(
TEST_REQUEST_TIMEOUT,
mutateString(req.mountedIndexName()),
req.repositoryName(),
req.snapshotName(),
req.snapshotIndexName(),
req.indexSettings(),
req.ignoreIndexSettings(),
req.waitForCompletion(),
req.storage()
).masterNodeTimeout(req.masterNodeTimeout());
case 1 -> new MountSearchableSnapshotRequest(
TEST_REQUEST_TIMEOUT,
req.mountedIndexName(),
mutateString(req.repositoryName()),
req.snapshotName(),
req.snapshotIndexName(),
req.indexSettings(),
req.ignoreIndexSettings(),
req.waitForCompletion(),
req.storage()
).masterNodeTimeout(req.masterNodeTimeout());
case 2 -> new MountSearchableSnapshotRequest(
TEST_REQUEST_TIMEOUT,
req.mountedIndexName(),
req.repositoryName(),
mutateString(req.snapshotName()),
req.snapshotIndexName(),
req.indexSettings(),
req.ignoreIndexSettings(),
req.waitForCompletion(),
req.storage()
).masterNodeTimeout(req.masterNodeTimeout());
case 3 -> new MountSearchableSnapshotRequest(
TEST_REQUEST_TIMEOUT,
req.mountedIndexName(),
req.repositoryName(),
req.snapshotName(),
mutateString(req.snapshotIndexName()),
req.indexSettings(),
req.ignoreIndexSettings(),
req.waitForCompletion(),
req.storage()
).masterNodeTimeout(req.masterNodeTimeout());
case 4 -> new MountSearchableSnapshotRequest(
TEST_REQUEST_TIMEOUT,
req.mountedIndexName(),
req.repositoryName(),
req.snapshotName(),
req.snapshotIndexName(),
mutateSettings(req.indexSettings()),
req.ignoreIndexSettings(),
req.waitForCompletion(),
req.storage()
).masterNodeTimeout(req.masterNodeTimeout());
case 5 -> new MountSearchableSnapshotRequest(
TEST_REQUEST_TIMEOUT,
req.mountedIndexName(),
req.repositoryName(),
req.snapshotName(),
req.snapshotIndexName(),
req.indexSettings(),
mutateStringArray(req.ignoreIndexSettings()),
req.waitForCompletion(),
req.storage()
).masterNodeTimeout(req.masterNodeTimeout());
case 6 -> new MountSearchableSnapshotRequest(
TEST_REQUEST_TIMEOUT,
req.mountedIndexName(),
req.repositoryName(),
req.snapshotName(),
req.snapshotIndexName(),
req.indexSettings(),
req.ignoreIndexSettings(),
req.waitForCompletion() == false,
req.storage()
).masterNodeTimeout(req.masterNodeTimeout());
case 7 -> new MountSearchableSnapshotRequest(
TEST_REQUEST_TIMEOUT,
req.mountedIndexName(),
req.repositoryName(),
req.snapshotName(),
req.snapshotIndexName(),
req.indexSettings(),
req.ignoreIndexSettings(),
req.waitForCompletion(),
randomValueOtherThan(req.storage(), () -> randomFrom(MountSearchableSnapshotRequest.Storage.values()))
).masterNodeTimeout(req.masterNodeTimeout());
default -> new MountSearchableSnapshotRequest(
TEST_REQUEST_TIMEOUT,
req.mountedIndexName(),
req.repositoryName(),
req.snapshotName(),
req.snapshotIndexName(),
req.indexSettings(),
req.ignoreIndexSettings(),
req.waitForCompletion(),
req.storage()
).masterNodeTimeout(mutateTimeValue(req.masterNodeTimeout()));
};
}
private static TimeValue mutateTimeValue(TimeValue timeValue) {
long millis = timeValue.millis();
long newMillis = randomValueOtherThan(millis, () -> randomLongBetween(0, 60000));
return TimeValue.timeValueMillis(newMillis);
}
private static String mutateString(String string) {
return randomAlphaOfLength(11 - string.length());
}
private static Settings mutateSettings(Settings settings) {
if (settings.size() < 5 && (settings.isEmpty() || randomBoolean())) {
return Settings.builder().put(settings).put(randomAlphaOfLength(3), randomAlphaOfLength(3)).build();
} else {
return Settings.EMPTY;
}
}
private static String[] mutateStringArray(String[] strings) {
if (strings.length < 5 && (strings.length == 0 || randomBoolean())) {
String[] newStrings = Arrays.copyOf(strings, strings.length + 1);
newStrings[strings.length] = randomAlphaOfLength(3);
return newStrings;
} else if (randomBoolean()) {
String[] newStrings = Arrays.copyOf(strings, strings.length);
int i = randomIntBetween(0, newStrings.length - 1);
newStrings[i] = mutateString(newStrings[i]);
return newStrings;
} else {
return Strings.EMPTY_ARRAY;
}
}
public void testForbidsCustomDataPath() {
final ActionRequestValidationException validationException = new MountSearchableSnapshotRequest(
TEST_REQUEST_TIMEOUT,
randomAlphaOfLength(5),
randomAlphaOfLength(5),
randomAlphaOfLength(5),
randomAlphaOfLength(5),
Settings.builder().put(IndexMetadata.SETTING_DATA_PATH, randomAlphaOfLength(5)).build(),
Strings.EMPTY_ARRAY,
randomBoolean(),
randomFrom(MountSearchableSnapshotRequest.Storage.values())
).validate();
assertThat(validationException.getMessage(), containsString(IndexMetadata.SETTING_DATA_PATH));
}
public void testParsesStorage() throws IOException {
assertStorageParser("full_copy", MountSearchableSnapshotRequest.Storage.FULL_COPY);
assertStorageParser("shared_cache", MountSearchableSnapshotRequest.Storage.SHARED_CACHE);
assertStorageParser("FULL_COPY", MountSearchableSnapshotRequest.Storage.FULL_COPY);
}
private static void assertStorageParser(String storageParam, MountSearchableSnapshotRequest.Storage expected) throws IOException {
final Map<String, String> params = new HashMap<>();
params.put("repository", "test");
params.put("snapshot", "test");
params.put("storage", storageParam);
final RestRequest restReq = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(params)
.withContent(new BytesArray("{\"index\":\"test\"}"), XContentType.JSON)
.build();
final MountSearchableSnapshotRequest mountReq = MountSearchableSnapshotRequest.PARSER.apply(restReq.contentParser(), restReq);
assertThat(mountReq.storage(), equalTo(expected));
}
}
|
MountSearchableSnapshotRequestTests
|
java
|
hibernate__hibernate-orm
|
hibernate-testing/src/main/java/org/hibernate/testing/orm/jdbc/PreparedStatementSpyConnectionProviderSettingProvider.java
|
{
"start": 233,
"end": 504
}
|
class ____ implements SettingProvider.Provider<PreparedStatementSpyConnectionProvider> {
@Override
public PreparedStatementSpyConnectionProvider getSetting() {
return new PreparedStatementSpyConnectionProvider();
}
}
|
PreparedStatementSpyConnectionProviderSettingProvider
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/RestPutQueryRulesetActionTests.java
|
{
"start": 869,
"end": 2857
}
|
class ____ extends AbstractRestEnterpriseSearchActionTests {
public void testWithNonCompliantLicense() throws Exception {
checkLicenseForRequest(
new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withMethod(RestRequest.Method.PUT)
.withParams(Map.of("ruleset_id", "ruleset-id"))
.withContent(new BytesArray("""
{
"ruleset_id": "ruleset-id",
"rules": [
{
"rule_id": "query-rule-id",
"type": "pinned",
"criteria": [
{
"type": "exact",
"metadata": "query_string",
"value": "elastic"
}
],
"actions":
{
"ids": [
"id1",
"id2"
]
}
}
]
}
"""), XContentType.JSON)
.build(),
LicenseUtils.Product.QUERY_RULES
);
}
public void testInvalidRequestWithNonCompliantLicense() throws Exception {
checkLicenseForRequest(
new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withMethod(RestRequest.Method.PUT)
.withParams(Map.of("invalid_param_name", "invalid_value"))
.withContent(new BytesArray("{}"), XContentType.JSON)
.build(),
LicenseUtils.Product.QUERY_RULES
);
}
@Override
protected EnterpriseSearchBaseRestHandler getRestAction(XPackLicenseState licenseState) {
return new RestPutQueryRulesetAction(licenseState);
}
}
|
RestPutQueryRulesetActionTests
|
java
|
processing__processing4
|
core/src/processing/core/PApplet.java
|
{
"start": 213055,
"end": 331333
}
|
class ____ its methods like <b>readLine()</b> and
* <b>close</b> used in the above example, please consult a Java
* reference.<br />
* <br />
* Starting with Processing release 0134, all files loaded and saved by the
* Processing API use UTF-8 encoding. In previous releases, the default
* encoding for your platform was used, which causes problems when files are
* moved to other platforms.
*
* @webref input:files
* @webBrief Creates a <b>BufferedReader</b> object that can be used to read
* files line-by-line as individual <b>String</b> objects
* @param filename
* name of the file to be opened
* @see BufferedReader
* @see PApplet#createWriter(String)
* @see PrintWriter
*/
public BufferedReader createReader(String filename) {
InputStream is = createInput(filename);
if (is == null) {
System.err.println("The file \"" + filename + "\" " +
"is missing or inaccessible, make sure " +
"the URL is valid or that the file has been " +
"added to your sketch and is readable.");
return null;
}
return createReader(is);
}
/**
* @nowebref
*/
static public BufferedReader createReader(File file) {
try {
InputStream is = new FileInputStream(file);
if (file.getName().toLowerCase().endsWith(".gz")) {
is = new GZIPInputStream(is);
}
return createReader(is);
} catch (IOException e) {
// Re-wrap rather than forcing novices to learn about exceptions
throw new RuntimeException(e);
}
}
/**
* @nowebref
* I want to read lines from a stream. If I have to type the
* following lines anymore I'm gonna send Sun my medical bills.
*/
static public BufferedReader createReader(InputStream input) {
InputStreamReader isr =
new InputStreamReader(input, StandardCharsets.UTF_8);
BufferedReader reader = new BufferedReader(isr);
// consume the Unicode BOM (byte order marker) if present
try {
reader.mark(1);
int c = reader.read();
// if not the BOM, back up to the beginning again
if (c != '\uFEFF') {
reader.reset();
}
} catch (IOException e) {
e.printStackTrace();
}
return reader;
}
/**
*
* Creates a new file in the sketch folder, and a <b>PrintWriter</b> object
* to write to it. For the file to be made correctly, it should be flushed
* and must be closed with its <b>flush()</b> and <b>close()</b> methods
* (see above example).
* <br/> <br/>
* Starting with Processing release 0134, all files loaded and saved by the
* Processing API use UTF-8 encoding. In previous releases, the default
* encoding for your platform was used, which causes problems when files
* are moved to other platforms.
*
* @webref output:files
* @webBrief Creates a new file in the sketch folder, and a <b>PrintWriter</b> object
* to write to it
* @param filename name of the file to be created
* @see PrintWriter
* @see PApplet#createReader
* @see BufferedReader
*/
public PrintWriter createWriter(String filename) {
return createWriter(saveFile(filename));
}
/**
* @nowebref
* I want to print lines to a file. I have RSI from typing these
* eight lines of code so many times.
*/
static public PrintWriter createWriter(File file) {
if (file == null) {
throw new RuntimeException("File passed to createWriter() was null");
}
try {
createPath(file); // make sure in-between folders exist
OutputStream output = new FileOutputStream(file);
if (file.getName().toLowerCase().endsWith(".gz")) {
output = new GZIPOutputStream(output);
}
return createWriter(output);
} catch (Exception e) {
throw new RuntimeException("Couldn't create a writer for " +
file.getAbsolutePath(), e);
}
}
/**
* @nowebref
* I want to print lines to a file. Why am I always explaining myself?
* It's the JavaSoft API engineers who need to explain themselves.
*/
static public PrintWriter createWriter(OutputStream output) {
BufferedOutputStream bos = new BufferedOutputStream(output, 8192);
OutputStreamWriter osw =
new OutputStreamWriter(bos, StandardCharsets.UTF_8);
return new PrintWriter(osw);
}
//////////////////////////////////////////////////////////////
// FILE INPUT
/**
* This is a function for advanced programmers to open a Java InputStream.
* It's useful if you want to use the facilities provided by PApplet to
* easily open files from the data folder or from a URL, but want an
* InputStream object so that you can use other parts of Java to take more
* control of how the stream is read.<br />
* <br />
* The filename passed in can be:<br />
* - A URL, for instance <b>openStream("http://processing.org/")</b><br />
* - A file in the sketch's <b>data</b> folder<br />
* - The full path to a file to be opened locally (when running as an
* application)<br />
* <br />
* If the requested item doesn't exist, <b>null</b> is returned. If not online,
* this will also check to see if the user is asking for a file whose name
* isn't properly capitalized. If capitalization is different, an error
* will be printed to the console. This helps prevent issues that appear
* when a sketch is exported to the web, where case sensitivity matters, as
* opposed to running from inside the Processing Development Environment on
* Windows or macOS, where case sensitivity is preserved but ignored.<br />
* <br />
* If the file ends with <b>.gz</b>, the stream will automatically be gzip
* decompressed. If you don't want the automatic decompression, use the
* related function <b>createInputRaw()</b>.
* <br />
* In earlier releases, this function was called <b>openStream()</b>.<br />
* <br />
*
*
* <h3>Advanced</h3>
* Simplified method to open a Java InputStream.
* <p>
* This method is useful if you want to use the facilities provided
* by PApplet to easily open things from the data folder or from a URL,
* but want an InputStream object so that you can use other Java
* methods to take more control of how the stream is read.
* <p>
* If the requested item doesn't exist, null is returned.
* (Prior to 0096, die() would be called, killing the sketch)
* <p>
* For 0096+, the "data" folder is exported intact with subfolders,
* and openStream() properly handles subdirectories from the data folder
* <p>
* If not online, this will also check to see if the user is asking
* for a file whose name isn't properly capitalized. This helps prevent
* issues when a sketch is exported to the web, where case sensitivity
* matters, as opposed to Windows and the macOS default where
* case sensitivity is preserved but ignored.
* <p>
* It is strongly recommended that libraries use this method to open
* data files, so that the loading sequence is handled in the same way
* as functions like loadBytes(), loadImage(), etc.
* <p>
* The filename passed in can be:
* <UL>
* <LI>A URL, for instance openStream("http://processing.org/");
* <LI>A file in the sketch's data folder
* <LI>Another file to be opened locally (when running as an application)
* </UL>
*
* @webref input:files
* @webBrief This is a function for advanced programmers to open a Java <b>InputStream</b>
* @param filename the name of the file to use as input
* @see PApplet#createOutput(String)
* @see PApplet#selectOutput(String,String)
* @see PApplet#selectInput(String,String)
*
*/
@SuppressWarnings("JavadocLinkAsPlainText")
public InputStream createInput(String filename) {
InputStream input = createInputRaw(filename);
if (input != null) {
// if it's gzip-encoded, automatically decode
final String lower = filename.toLowerCase();
if (lower.endsWith(".gz") || lower.endsWith(".svgz")) {
try {
// buffered has to go *around* the GZ, otherwise 25x slower
return new BufferedInputStream(new GZIPInputStream(input));
} catch (IOException e) {
printStackTrace(e);
}
} else {
return new BufferedInputStream(input);
}
}
return null;
}
/**
* Call openStream() without automatic gzip decompression.
*/
public InputStream createInputRaw(String filename) {
if (filename == null) return null;
if (sketchPath == null) {
System.err.println("The sketch path is not set.");
throw new RuntimeException("Files must be loaded inside setup() or after it has been called.");
}
if (filename.length() == 0) {
// an error will be called by the parent function
//System.err.println("The filename passed to openStream() was empty.");
return null;
}
// First check whether this looks like a URL
if (filename.contains(":")) { // at least smells like URL
try {
URL url = new URL(filename);
URLConnection conn = url.openConnection();
if (conn instanceof HttpURLConnection httpConn) {
// Will not handle a protocol change (see below)
httpConn.setInstanceFollowRedirects(true);
int response = httpConn.getResponseCode();
// Default won't follow HTTP -> HTTPS redirects for security reasons
// http://stackoverflow.com/a/1884427
if (response >= 300 && response < 400) {
String newLocation = httpConn.getHeaderField("Location");
return createInputRaw(newLocation);
}
return conn.getInputStream();
} else if (conn instanceof JarURLConnection) {
return url.openStream();
}
} catch (MalformedURLException mfue) {
// not a URL, that's fine
} catch (FileNotFoundException fnfe) {
// Added in 0119 b/c Java 1.5 throws FNFE when URL not available.
// https://download.processing.org/bugzilla/403.html
} catch (IOException e) {
// changed for 0117, shouldn't be throwing exception
printStackTrace(e);
//System.err.println("Error downloading from URL " + filename);
return null;
//throw new RuntimeException("Error downloading from URL " + filename);
}
}
InputStream stream;
// Moved this earlier than the getResourceAsStream() checks, because
// calling getResourceAsStream() on a directory lists its contents.
// https://download.processing.org/bugzilla/716.html
try {
// First see if it's in a data folder. This may fail by throwing
// a SecurityException. If so, this whole block will be skipped.
File file = new File(dataPath(filename));
if (!file.exists()) {
// next see if it's just in the sketch folder
file = sketchFile(filename);
}
if (file.isDirectory()) {
return null;
}
if (file.exists()) {
try {
// handle case sensitivity check
String filePath = file.getCanonicalPath();
String filenameActual = new File(filePath).getName();
// make sure there isn't a subfolder prepended to the name
String filenameShort = new File(filename).getName();
// if the actual filename is the same, but capitalized
// differently, warn the user.
//if (filenameActual.equalsIgnoreCase(filenameShort) &&
//!filenameActual.equals(filenameShort)) {
if (!filenameActual.equals(filenameShort)) {
throw new RuntimeException("This file is named " +
filenameActual + " not " +
filename + ". Rename the file " +
"or change your code.");
}
} catch (IOException ignored) { }
}
// if this file is ok, may as well just load it
return new FileInputStream(file);
// have to break these out because a general Exception might
// catch the RuntimeException being thrown above
} catch (IOException | SecurityException ignored) { }
// Using getClassLoader() prevents java from converting dots
// to slashes or requiring a slash at the beginning.
// (a slash as a prefix means that it'll load from the root of
// the jar, rather than trying to dig into the package location)
ClassLoader cl = getClass().getClassLoader();
// by default, data files are exported to the root path of the jar.
// (not the data folder) so check there first.
stream = cl.getResourceAsStream("data/" + filename);
if (stream != null) {
String cn = stream.getClass().getName();
// this is an irritation of sun's java plug-in, which will return
// a non-null stream for an object that doesn't exist. like all good
// things, this is probably introduced in java 1.5. awesome!
// https://download.processing.org/bugzilla/359.html
if (!cn.equals("sun.plugin.cache.EmptyInputStream")) {
return stream;
}
}
// When used with an online script, also need to check without the
// data folder, in case it's not in a subfolder called 'data'.
// https://download.processing.org/bugzilla/389.html
stream = cl.getResourceAsStream(filename);
if (stream != null) {
String cn = stream.getClass().getName();
if (!cn.equals("sun.plugin.cache.EmptyInputStream")) {
return stream;
}
}
try {
// attempt to load from a local file
try { // first try to catch any security exceptions
try {
return new FileInputStream(dataPath(filename));
} catch (IOException ignored) { }
try {
return new FileInputStream(sketchPath(filename));
} catch (Exception ignored) { }
try {
return new FileInputStream(filename);
} catch (IOException ignored) { }
} catch (SecurityException ignored) { } // online, whups
} catch (Exception e) {
printStackTrace(e);
}
return null;
}
/**
* @nowebref
*/
static public InputStream createInput(File file) {
if (file == null) {
throw new IllegalArgumentException("File passed to createInput() was null");
}
if (!file.exists()) {
System.err.println(file + " does not exist, createInput() will return null");
return null;
}
try {
InputStream input = new FileInputStream(file);
final String lower = file.getName().toLowerCase();
if (lower.endsWith(".gz") || lower.endsWith(".svgz")) {
return new BufferedInputStream(new GZIPInputStream(input));
}
return new BufferedInputStream(input);
} catch (IOException e) {
System.err.println("Could not createInput() for " + file);
e.printStackTrace();
return null;
}
}
/**
*
* Reads the contents of a file and places it in a byte array. If the name of
* the file is used as the parameter, as in the above example, the file must
* be loaded in the sketch's "data" directory/folder. <br />
* <br />
* Alternatively, the file maybe be loaded from anywhere on the local computer
* using an absolute path (something that starts with / on Unix and Linux, or
* a drive letter on Windows), or the filename parameter can be a URL for a
* file found on a network.<br />
* <br />
* If the file is not available or an error occurs, <b>null</b> will be
* returned and an error message will be printed to the console. The error
* message does not halt the program, however the <b>null</b> value may cause a
* NullPointerException if your code does not check whether the value returned
* is <b>null</b>.<br />
*
* @webref input:files
* @webBrief Reads the contents of a file or url and places it in a byte
* array
* @param filename
* name of a file in the data folder or a URL.
* @see PApplet#loadStrings(String)
* @see PApplet#saveStrings(String, String[])
* @see PApplet#saveBytes(String, byte[])
*
*/
public byte[] loadBytes(String filename) {
String lower = filename.toLowerCase();
// If it's not a .gz file, then we might be able to uncompress it into
// a fixed-size buffer, which should help speed because we won't have to
// reallocate and resize the target array each time it gets full.
if (!lower.endsWith(".gz")) {
// If this looks like a URL, try to load it that way. Use the fact that
// URL connections may have a content length header to size the array.
if (filename.contains(":")) { // at least smells like URL
InputStream input = null;
try {
URL url = new URL(filename);
URLConnection conn = url.openConnection();
int length = -1;
if (conn instanceof HttpURLConnection httpConn) {
// Will not handle a protocol change (see below)
httpConn.setInstanceFollowRedirects(true);
int response = httpConn.getResponseCode();
// Default won't follow HTTP -> HTTPS redirects for security reasons
// http://stackoverflow.com/a/1884427
if (response >= 300 && response < 400) {
String newLocation = httpConn.getHeaderField("Location");
return loadBytes(newLocation);
}
length = conn.getContentLength();
input = conn.getInputStream();
} else if (conn instanceof JarURLConnection) {
length = conn.getContentLength();
input = url.openStream();
}
if (input != null) {
byte[] buffer;
if (length != -1) {
buffer = new byte[length];
int count;
int offset = 0;
while ((count = input.read(buffer, offset, length - offset)) > 0) {
offset += count;
}
} else {
buffer = loadBytes(input);
}
input.close();
return buffer;
}
} catch (MalformedURLException mfue) {
// not a url, that's fine
} catch (FileNotFoundException fnfe) {
// Java 1.5+ throws FNFE when URL not available
// https://download.processing.org/bugzilla/403.html
} catch (IOException e) {
printStackTrace(e);
return null;
} finally {
if (input != null) {
try {
input.close();
} catch (IOException e) {
// just deal
}
}
}
}
}
InputStream is = createInput(filename);
if (is != null) {
byte[] outgoing = loadBytes(is);
try {
is.close();
} catch (IOException e) {
printStackTrace(e); // shouldn't happen
}
return outgoing;
}
System.err.println("The file \"" + filename + "\" " +
"is missing or inaccessible, make sure " +
"the URL is valid or that the file has been " +
"added to your sketch and is readable.");
return null;
}
/**
* @nowebref
*/
static public byte[] loadBytes(InputStream input) {
try {
ByteArrayOutputStream out = new ByteArrayOutputStream();
byte[] buffer = new byte[4096];
int bytesRead = input.read(buffer);
while (bytesRead != -1) {
out.write(buffer, 0, bytesRead);
bytesRead = input.read(buffer);
}
out.flush();
return out.toByteArray();
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
  /**
   * Read the full contents of a file into a byte array.
   * Files ending in .gz are transparently decompressed: the expected
   * uncompressed size is taken from the 4-byte little-endian length
   * field at the end of the gzip file so the result buffer can be
   * allocated up front (per RFC 1952 this field is the size mod 2^32,
   * so files >4GB uncompressed would be mis-sized). Returns null
   * (with a console message or stack trace) when the file is missing
   * or an IOException occurs.
   * @nowebref
   */
  static public byte[] loadBytes(File file) {
    if (!file.exists()) {
      System.err.println(file + " does not exist, loadBytes() will return null");
      return null;
    }

    try {
      InputStream input;
      int length;

      if (file.getName().toLowerCase().endsWith(".gz")) {
        // the gzip trailer stores the uncompressed size in the final
        // four bytes, least significant byte first
        RandomAccessFile raf = new RandomAccessFile(file, "r");
        raf.seek(raf.length() - 4);
        int b4 = raf.read();
        int b3 = raf.read();
        int b2 = raf.read();
        int b1 = raf.read();
        // mixes | and + (precedence groups the additions first), but the
        // shifted bytes don't overlap, so the result equals OR-ing all four
        length = (b1 << 24) | (b2 << 16) + (b3 << 8) + b4;
        raf.close();

        // buffered has to go *around* the GZ, otherwise 25x slower
        input = new BufferedInputStream(new GZIPInputStream(new FileInputStream(file)));

      } else {
        long len = file.length();
        // Java arrays are limited to a bit under Integer.MAX_VALUE entries
        // http://stackoverflow.com/a/3039805
        int maxArraySize = Integer.MAX_VALUE - 5;
        if (len > maxArraySize) {
          System.err.println("Cannot use loadBytes() on a file larger than " + maxArraySize);
          return null;
        }
        length = (int) len;
        input = new BufferedInputStream(new FileInputStream(file));
      }
      byte[] buffer = new byte[length];
      int count;
      int offset = 0;
      // count will come back 0 when complete (or -1 if somehow going long?)
      while ((count = input.read(buffer, offset, length - offset)) > 0) {
        offset += count;
      }
      input.close();
      return buffer;

    } catch (IOException e) {
      e.printStackTrace();
      return null;
    }
  }
/**
* @nowebref
*/
static public String[] loadStrings(File file) {
if (!file.exists()) {
System.err.println(file + " does not exist, loadStrings() will return null");
return null;
}
InputStream is = createInput(file);
if (is != null) {
String[] outgoing = loadStrings(is);
try {
is.close();
} catch (IOException e) {
e.printStackTrace();
}
return outgoing;
}
return null;
}
/**
*
* Reads the contents of a file and creates a String array of its individual
* lines. If the name of the file is used as the parameter, as in the above
* example, the file must be loaded in the sketch's "data" directory/folder.
* <br />
* <br />
* Alternatively, the file maybe be loaded from anywhere on the local computer
* using an absolute path (something that starts with / on Unix and Linux, or
* a drive letter on Windows), or the filename parameter can be a URL for a
* file found on a network.<br />
* <br />
* If the file is not available or an error occurs, <b>null</b> will be
* returned and an error message will be printed to the console. The error
* message does not halt the program, however the <b>null</b> value may cause a
* NullPointerException if your code does not check whether the value returned
* is <b>null</b>.<br />
* <br />
* Starting with Processing release 0134, all files loaded and saved by the
* Processing API use UTF-8 encoding. In previous releases, the default
* encoding for your platform was used, which causes problems when files are
* moved to other platforms.
*
* <h3>Advanced</h3> Load data from a file and shove it into a String array.
* <p>
* Exceptions are handled internally, when an error, occurs, an exception is
* printed to the console and 'null' is returned, but the program continues
* running. This is a tradeoff between 1) showing the user that there was a
* problem but 2) not requiring that all i/o code is contained in try/catch
* blocks, for the sake of new users (or people who are just trying to get
* things done in an informal "scripting" fashion). If you want to handle
* exceptions, use Java methods for I/O.
*
* @webref input:files
* @webBrief Reads the contents of a file or url and creates a <b>String</b> array of
* its individual lines
* @param filename
* name of the file or url to load
* @see PApplet#loadBytes(String)
* @see PApplet#saveStrings(String, String[])
* @see PApplet#saveBytes(String, byte[])
*/
public String[] loadStrings(String filename) {
InputStream is = createInput(filename);
if (is != null) {
String[] strArr = loadStrings(is);
try {
is.close();
} catch (IOException e) {
printStackTrace(e);
}
return strArr;
}
System.err.println("The file \"" + filename + "\" " +
"is missing or inaccessible, make sure " +
"the URL is valid or that the file has been " +
"added to your sketch and is readable.");
return null;
}
/**
* @nowebref
*/
static public String[] loadStrings(InputStream input) {
BufferedReader reader =
new BufferedReader(new InputStreamReader(input, StandardCharsets.UTF_8));
return loadStrings(reader);
}
static public String[] loadStrings(BufferedReader reader) {
try {
String[] lines = new String[100];
int lineCount = 0;
String line;
while ((line = reader.readLine()) != null) {
if (lineCount == lines.length) {
String[] temp = new String[lineCount << 1];
System.arraycopy(lines, 0, temp, 0, lineCount);
lines = temp;
}
lines[lineCount++] = line;
}
reader.close();
if (lineCount == lines.length) {
return lines;
}
// resize array to appropriate amount for these lines
String[] output = new String[lineCount];
System.arraycopy(lines, 0, output, 0, lineCount);
return output;
} catch (IOException e) {
e.printStackTrace();
//throw new RuntimeException("Error inside loadStrings()");
}
return null;
}
//////////////////////////////////////////////////////////////
// FILE OUTPUT
/**
*
* Similar to <b>createInput()</b>, this creates a Java <b>OutputStream</b>
* for a given filename or path. The file will be created in the sketch
* folder, or in the same folder as an exported application. <br />
* <br />
* If the path does not exist, intermediate folders will be created. If an
* exception occurs, it will be printed to the console, and <b>null</b> will
* be returned. <br />
* <br />
* This function is a convenience over the Java approach that requires you to
* 1) create a FileOutputStream object, 2) determine the exact file location,
* and 3) handle exceptions. Exceptions are handled internally by the
* function, which is more appropriate for "sketch" projects. <br />
* <br />
* If the output filename ends with <b>.gz</b>, the output will be
* automatically GZIP compressed as it is written.
*
* @webref output:files
* @webBrief Similar to <b>createInput()</b>, this creates a Java
* <b>OutputStream</b> for a given filename or path
* @param filename
* name of the file to open
* @see PApplet#createInput(String)
* @see PApplet#selectOutput(String,String)
*/
public OutputStream createOutput(String filename) {
return createOutput(saveFile(filename));
}
/**
* @nowebref
*/
static public OutputStream createOutput(File file) {
try {
createPath(file); // make sure the path exists
OutputStream output = new FileOutputStream(file);
if (file.getName().toLowerCase().endsWith(".gz")) {
return new BufferedOutputStream(new GZIPOutputStream(output));
}
return new BufferedOutputStream(output);
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
/**
*
* Save the contents of a stream to a file in the sketch folder. This is
* basically <b>saveBytes(blah, loadBytes())</b>, but done more efficiently
* (and with less confusing syntax).<br />
* <br />
* The <b>target</b> parameter can be either a String specifying a file name,
* or, for greater control over the file location, a <b>File</b> object. (Note
* that, unlike some other functions, this will not automatically compress or
* uncompress gzip files.)
*
* @webref output:files
* @webBrief Save the contents of a stream to a file in the sketch folder
* @param target
* name of the file to write to
* @param source
* location to read from (a filename, path, or URL)
* @see PApplet#createOutput(String)
*/
public boolean saveStream(String target, String source) {
return saveStream(saveFile(target), source);
}
/**
* Identical to the other saveStream(), but writes to a File
* object, for greater control over the file location.
* <p/>
* Note that unlike other api methods, this will not automatically
* compress or uncompress gzip files.
*/
public boolean saveStream(File target, String source) {
return saveStream(target, createInputRaw(source));
}
/**
* @nowebref
*/
public boolean saveStream(String target, InputStream source) {
return saveStream(saveFile(target), source);
}
/**
* @nowebref
*/
static public boolean saveStream(File target, InputStream source) {
File tempFile = null;
try {
// make sure that this path actually exists before writing
createPath(target);
tempFile = createTempFile(target);
FileOutputStream targetStream = new FileOutputStream(tempFile);
saveStream(targetStream, source);
targetStream.close();
if (target.exists()) {
if (!target.delete()) {
System.err.println("Could not replace " + target);
}
}
if (!tempFile.renameTo(target)) {
System.err.println("Could not rename temporary file " + tempFile);
return false;
}
return true;
} catch (IOException e) {
if (tempFile != null) {
if (!tempFile.delete()) {
System.err.println("Could not rename temporary file " + tempFile);
}
}
e.printStackTrace();
return false;
}
}
/**
* @nowebref
*/
static public void saveStream(OutputStream target,
InputStream source) throws IOException {
BufferedInputStream bis = new BufferedInputStream(source, 16384);
BufferedOutputStream bos = new BufferedOutputStream(target);
byte[] buffer = new byte[8192];
int bytesRead;
while ((bytesRead = bis.read(buffer)) != -1) {
bos.write(buffer, 0, bytesRead);
}
bos.flush();
}
/**
*
* As the opposite of <b>loadBytes()</b>, this function will write an entire
* array of bytes to a file. The data is saved in binary format. This file is
* saved to the sketch's folder, which is opened by selecting "Show Sketch
* Folder" from the "Sketch" menu. Alternatively, the files can be saved to
* any location on the computer by using an absolute path (something that
* starts with / on Unix and Linux, or a drive letter on Windows).
*
* @webref output:files
* @webBrief Opposite of <b>loadBytes()</b>, will write an entire array of
* bytes to a file
* @param filename
* name of the file to write to
* @param data
* array of bytes to be written
* @see PApplet#loadStrings(String)
* @see PApplet#loadBytes(String)
* @see PApplet#saveStrings(String, String[])
*/
public void saveBytes(String filename, byte[] data) {
saveBytes(saveFile(filename), data);
}
/**
* Creates a temporary file based on the name/extension of another file
* and in the same parent directory. Ensures that the same extension is used
* (i.e. so that .gz files are gzip compressed on output) and that it's done
* from the same directory so that renaming the file later won't cross file
* system boundaries.
*/
static private File createTempFile(File file) throws IOException {
File parentDir = file.getParentFile();
if (!parentDir.exists()) {
if (!parentDir.mkdirs()) {
throw new IOException("Could not make directories for " + parentDir);
}
}
String name = file.getName();
String prefix;
String suffix = null;
int dot = name.lastIndexOf('.');
if (dot == -1) {
prefix = name;
} else {
// preserve the extension so that .gz works properly
prefix = name.substring(0, dot);
suffix = name.substring(dot);
}
// Prefix must be three characters
if (prefix.length() < 3) {
prefix += "processing";
}
return File.createTempFile(prefix, suffix, parentDir);
}
/**
* @nowebref
* Saves bytes to a specific File location specified by the user.
*/
static public void saveBytes(File file, byte[] data) {
File tempFile = null;
try {
tempFile = createTempFile(file);
OutputStream output = createOutput(tempFile);
if (output != null) {
saveBytes(output, data);
output.close();
} else {
System.err.println("Could not write to " + tempFile);
}
if (file.exists()) {
if (!file.delete()) {
System.err.println("Could not replace " + file);
}
}
if (!tempFile.renameTo(file)) {
System.err.println("Could not rename temporary file " + tempFile);
}
} catch (IOException e) {
System.err.println("error saving bytes to " + file);
if (tempFile != null) {
if (!tempFile.delete()) {
System.err.println("Could not delete temporary file " + tempFile);
}
}
e.printStackTrace();
}
}
  /**
   * Write a buffer of bytes to an OutputStream and flush it. The
   * stream is left open; an IOException is printed rather than
   * rethrown, matching the rest of the save*() convenience methods.
   * @nowebref
   */
  static public void saveBytes(OutputStream output, byte[] data) {
    try {
      output.write(data);
      output.flush();
    } catch (IOException e) {
      e.printStackTrace();  // best-effort: report but don't propagate
    }
  }
//
/**
*
* Writes an array of Strings to a file, one line per String. By default, this
* file is saved to the sketch's folder. This folder is opened by selecting
* "Show Sketch Folder" from the "Sketch" menu.<br />
* <br />
* Alternatively, the file can be saved to any location on the computer by
* using an absolute path (something that starts with / on Unix and Linux, or
* a drive letter on Windows).<br />
* <br />
* Starting with Processing 1.0, all files loaded and saved by the Processing
* API use UTF-8 encoding. In earlier releases, the default encoding for your
* platform was used, which causes problems when files are moved to other
* platforms.
*
* @webref output:files
* @webBrief Writes an array of strings to a file, one line per string
* @param filename
* filename for output
* @param data
* string array to be written
* @see PApplet#loadStrings(String)
* @see PApplet#loadBytes(String)
* @see PApplet#saveBytes(String, byte[])
*/
public void saveStrings(String filename, String[] data) {
saveStrings(saveFile(filename), data);
}
/**
* @nowebref
*/
static public void saveStrings(File file, String[] data) {
saveStrings(createOutput(file), data);
}
/**
* @nowebref
*/
static public void saveStrings(OutputStream output, String[] data) {
PrintWriter writer = createWriter(output);
for (String item : data) {
writer.println(item);
}
writer.flush();
writer.close();
}
//////////////////////////////////////////////////////////////
  /**
   * Determine the folder to use as the sketch path. Starts from the
   * process working directory (user.dir), then adjusts based on where
   * the jar containing PApplet lives: inside a macOS .app bundle the
   * folder containing the app is used; elsewhere, a jar inside a lib/
   * folder means the executable is two directories up. May return
   * null if a SecurityException prevents reading system properties.
   */
  static protected String calcSketchPath() {
    // try to get the user folder. if running under java web start,
    // this may cause a security exception if the code is not signed.
    // http://processing.org/discourse/yabb_beta/YaBB.cgi?board=Integrate;action=display;num=1159386274
    String folder = null;
    try {
      folder = System.getProperty("user.dir");

      // locate the jar this class was loaded from
      URL jarURL =
        PApplet.class.getProtectionDomain().getCodeSource().getLocation();
      // Decode URL
      String jarPath = jarURL.toURI().getSchemeSpecificPart();

      // Workaround for bug in Java for OS X from Oracle (7u51)
      // https://github.com/processing/processing/issues/2181
      if (platform == MACOS) {
        if (jarPath.contains("Contents/Java/")) {
          // running from inside a .app bundle: use the folder next to it
          String appPath = jarPath.substring(0, jarPath.indexOf(".app") + 4);
          File containingFolder = new File(appPath).getParentFile();
          folder = containingFolder.getAbsolutePath();
        }
      } else {
        // Working directory may not be set properly, try some options
        // https://github.com/processing/processing/issues/2195
        if (jarPath.contains("/lib/")) {
          // Windows or Linux, back up a directory to get the executable
          folder = new File(jarPath, "../..").getCanonicalPath();
        }
      }
    } catch (Exception e) {
      e.printStackTrace();
    }
    return folder;
  }
  /**
   * Return the path to the sketch folder, computing it lazily via
   * calcSketchPath() on first call and caching the result.
   * NOTE(review): the lazy init is not synchronized; assumes the
   * first call happens on a single thread — confirm against startup.
   */
  public String sketchPath() {
    if (sketchPath == null) {
      sketchPath = calcSketchPath();
    }
    return sketchPath;
  }
/**
* Prepend the sketch folder path to the filename (or path) that is
* passed in. External libraries should use this function to save to
* the sketch folder.
* <p/>
* This will also cause an error if the sketch is not inited properly,
* meaning that init() was never called on the PApplet when hosted
* my some other main() or by other code. For proper use of init(),
* see the examples in the main description text for PApplet.
*/
public String sketchPath(String where) {
if (sketchPath() == null) {
return where;
}
// isAbsolute() could throw an access exception, but so will writing
// to the local disk using the sketch path, so this is safe here.
// for 0120, added a try/catch anyways.
try {
if (new File(where).isAbsolute()) {
return where;
}
} catch (Exception e) {
// do nothing
}
return sketchPath() + File.separator + where;
}
public File sketchFile(String where) {
return new File(sketchPath(where));
}
/**
* Returns a path adjacent the application to save to. Like sketchPath(),
* but creates any in-between folders so that things save properly.
* <p/>
* All saveXxxx() functions use the path to the sketch folder, rather than
* its data folder. Once exported, the data folder will be found inside the
* jar file of the exported application. In this case, it's not
* possible to save data into the jar file, because it will often be running
* from a server, or marked in-use if running from a local file system.
* With this in mind, saving to the data path doesn't make sense anyway.
* If you know you're running locally, and want to save to the data folder,
* use <TT>saveXxxx("data/blah.dat")</TT>.
*/
public String savePath(String where) {
if (where == null) return null;
String filename = sketchPath(where);
createPath(filename);
return filename;
}
/**
* Identical to savePath(), but returns a File object.
*/
public File saveFile(String where) {
return new File(savePath(where));
}
static File desktopFolder;
static public File desktopFile(String what) {
if (desktopFolder == null) {
// Should work on Linux and OS X (on OS X, even with the localized version).
desktopFolder = new File(System.getProperty("user.home"), "Desktop");
if (!desktopFolder.exists()) {
if (platform == WINDOWS && !disableAWT) {
desktopFolder = ShimAWT.getWindowsDesktop();
} else {
throw new UnsupportedOperationException("Could not find a suitable Desktop folder");
}
}
}
return new File(desktopFolder, what);
}
static public String desktopPath(String what) {
return desktopFile(what).getAbsolutePath();
}
/**
* <b>This function almost certainly does not do the thing you want it to.</b>
* The data path is handled differently on each platform, and should not be
* considered a location to write files. It should also not be assumed that
* this location can be read from or listed. This function is used internally
* as a possible location for reading files. It's still "public" as a
* holdover from earlier code.
* <p>
* Libraries should use createInput() to get an InputStream or createOutput()
* to get an OutputStream. sketchPath() can be used to get a location
* relative to the sketch. Again, <b>do not</b> use this to get relative
* locations of files. You'll be disappointed when your app runs on different
* platforms.
*/
public String dataPath(String where) {
return dataFile(where).getAbsolutePath();
}
/**
* Return a full path to an item in the data folder as a File object.
* See the dataPath() method for more information.
*/
public File dataFile(String where) {
// isAbsolute() could throw an access exception, but so will writing
// to the local disk using the sketch path, so this is safe here.
File why = new File(where);
if (why.isAbsolute()) return why;
URL jarURL = getClass().getProtectionDomain().getCodeSource().getLocation();
// Decode URL
String jarPath;
try {
jarPath = jarURL.toURI().getPath();
} catch (URISyntaxException e) {
e.printStackTrace();
return null;
}
if (jarPath.contains("Contents/Java/")) {
File containingFolder = new File(jarPath).getParentFile();
File dataFolder = new File(containingFolder, "data");
return new File(dataFolder, where);
}
// Windows, Linux, or when not using a Mac OS X .app file
return new File(sketchPath + File.separator + "data" + File.separator + where);
}
/**
* Takes a path and creates any in-between folders if they don't
* already exist. Useful when trying to save to a subfolder that
* may not actually exist.
*/
static public void createPath(String path) {
createPath(new File(path));
}
static public void createPath(File file) {
try {
String parent = file.getParent();
if (parent != null) {
File unit = new File(parent);
if (!unit.exists()) {
boolean result = unit.mkdirs();
if (!result) {
System.err.println("Could not create " + unit);
}
}
}
} catch (SecurityException se) {
System.err.println("You don't have permissions to create " +
file.getAbsolutePath());
}
}
static public String getExtension(String filename) {
String extension;
String lower = filename.toLowerCase();
int dot = filename.lastIndexOf('.');
if (dot == -1) {
return ""; // no extension found
}
extension = lower.substring(dot + 1);
// check for, and strip any parameters on the url, i.e.
// filename.jpg?blah=blah&something=that
int question = extension.indexOf('?');
if (question != -1) {
extension = extension.substring(0, question);
}
return extension;
}
//////////////////////////////////////////////////////////////
// URL ENCODING
  /**
   * Percent-encode a string using UTF-8 (URLEncoder applies the
   * application/x-www-form-urlencoded rules, so spaces become '+').
   */
  static public String urlEncode(String str) {
    return URLEncoder.encode(str, StandardCharsets.UTF_8);
  }

  // DO NOT use for file paths, URLDecoder can't handle RFC2396
  // "The recommended way to manage the encoding and decoding of
  // URLs is to use URI, and to convert between these two classes
  // using toURI() and URI.toURL()."
  // https://docs.oracle.com/javase/8/docs/api/java/net/URL.html
  /**
   * Decode a percent-encoded string using UTF-8. Not suitable for
   * decoding file paths; see the note above.
   */
  static public String urlDecode(String str) {
    return URLDecoder.decode(str, StandardCharsets.UTF_8);
  }
//////////////////////////////////////////////////////////////
// SORT
/**
*
* Sorts an array of numbers from smallest to largest, or puts an array of
* words in alphabetical order. The original array is not modified; a
* re-ordered array is returned. The <b>count</b> parameter states the number
* of elements to sort. For example, if there are 12 elements in an array and
* <b>count</b> is set to 5, only the first 5 elements in the array will be
* sorted. <!--As of release 0126, the alphabetical ordering is case
* insensitive.-->
*
* @webref data:array functions
* @webBrief Sorts an array of numbers from smallest to largest and puts an
* array of words in alphabetical order
* @param list
* array to sort
* @see PApplet#reverse(boolean[])
*/
static public byte[] sort(byte[] list) {
return sort(list, list.length);
}
/**
* @param count number of elements to sort, starting from 0
*/
static public byte[] sort(byte[] list, int count) {
byte[] outgoing = new byte[list.length];
System.arraycopy(list, 0, outgoing, 0, list.length);
Arrays.sort(outgoing, 0, count);
return outgoing;
}
static public char[] sort(char[] list) {
return sort(list, list.length);
}
static public char[] sort(char[] list, int count) {
char[] outgoing = new char[list.length];
System.arraycopy(list, 0, outgoing, 0, list.length);
Arrays.sort(outgoing, 0, count);
return outgoing;
}
static public int[] sort(int[] list) {
return sort(list, list.length);
}
static public int[] sort(int[] list, int count) {
int[] outgoing = new int[list.length];
System.arraycopy(list, 0, outgoing, 0, list.length);
Arrays.sort(outgoing, 0, count);
return outgoing;
}
static public float[] sort(float[] list) {
return sort(list, list.length);
}
static public float[] sort(float[] list, int count) {
float[] outgoing = new float[list.length];
System.arraycopy(list, 0, outgoing, 0, list.length);
Arrays.sort(outgoing, 0, count);
return outgoing;
}
static public String[] sort(String[] list) {
return sort(list, list.length);
}
static public String[] sort(String[] list, int count) {
String[] outgoing = new String[list.length];
System.arraycopy(list, 0, outgoing, 0, list.length);
Arrays.sort(outgoing, 0, count);
return outgoing;
}
//////////////////////////////////////////////////////////////
// ARRAY UTILITIES
  /**
   * Copies an array (or part of an array) to another array. The <b>src</b>
   * array is copied to the <b>dst</b> array, beginning at the position
   * specified by <b>srcPosition</b> and into the position specified by
   * <b>dstPosition</b>. The number of elements to copy is determined by
   * <b>length</b>. Note that copying values overwrites existing values in the
   * destination array. To append values instead of overwriting them, use
   * <b>concat()</b>.<br />
   * <br />
   * The simplified version with only two arguments — <b>arrayCopy(src,
   * dst)</b> — copies an entire array to another of the same size. It is
   * equivalent to <b>arrayCopy(src, 0, dst, 0, src.length)</b>.<br />
   * <br />
   * This function only copies references, so for multi-dimensional arrays
   * only the first level is copied (a two-dimensional array is an "array
   * of arrays"); that is usually the desired behavior. Internally this
   * calls Java's System.arraycopy(), so most things that apply there
   * are inherited.
   *
   * @webref data:array functions
   * @webBrief Copies an array (or part of an array) to another array
   * @param src
   *          the source array
   * @param srcPosition
   *          starting position in the source array
   * @param dst
   *          the destination array of the same data type as the source array
   * @param dstPosition
   *          starting position in the destination array
   * @param length
   *          number of array elements to be copied
   * @see PApplet#concat(boolean[], boolean[])
   */
  // suppression needed because src/dst are typed Object to accept any array;
  // System.arraycopy itself throws ArrayStoreException on a type mismatch
  @SuppressWarnings("SuspiciousSystemArraycopy")
  static public void arrayCopy(Object src, int srcPosition,
                               Object dst, int dstPosition,
                               int length) {
    System.arraycopy(src, srcPosition, dst, dstPosition, length);
  }

  /**
   * Convenience method for arraycopy().
   * Identical to <CODE>arraycopy(src, 0, dst, 0, length);</CODE>
   */
  @SuppressWarnings("SuspiciousSystemArraycopy")
  static public void arrayCopy(Object src, Object dst, int length) {
    System.arraycopy(src, 0, dst, 0, length);
  }

  /**
   * Shortcut to copy the entire contents of
   * the source into the destination array.
   * Identical to <CODE>arraycopy(src, 0, dst, 0, src.length);</CODE>
   */
  @SuppressWarnings("SuspiciousSystemArraycopy")
  static public void arrayCopy(Object src, Object dst) {
    System.arraycopy(src, 0, dst, 0, Array.getLength(src));
  }

  /**
   * @deprecated Use arrayCopy() instead.
   */
  @SuppressWarnings("SuspiciousSystemArraycopy")
  @Deprecated
  static public void arraycopy(Object src, int srcPosition,
                               Object dst, int dstPosition,
                               int length) {
    System.arraycopy(src, srcPosition, dst, dstPosition, length);
  }

  /**
   * @deprecated Use arrayCopy() instead.
   */
  @SuppressWarnings("SuspiciousSystemArraycopy")
  @Deprecated
  static public void arraycopy(Object src, Object dst, int length) {
    System.arraycopy(src, 0, dst, 0, length);
  }

  /**
   * @deprecated Use arrayCopy() instead.
   */
  @SuppressWarnings("SuspiciousSystemArraycopy")
  @Deprecated
  static public void arraycopy(Object src, Object dst) {
    System.arraycopy(src, 0, dst, 0, Array.getLength(src));
  }
/**
*
* Increases the size of a one-dimensional array. By default, this function
* doubles the size of the array, but the optional <b>newSize</b> parameter
* provides precise control over the increase in size.
* <p/>
* When using an array of objects, the data returned from the function must be
* cast to the object array's data type. For example: <em>SomeClass[] items =
* (SomeClass[]) expand(originalArray)</em>
*
* @webref data:array functions
* @webBrief Increases the size of an array
* @param list
* the array to expand
* @see PApplet#shorten(boolean[])
*/
static public boolean[] expand(boolean[] list) {
return expand(list, list.length > 0 ? list.length << 1 : 1);
}
/**
* @param newSize new size for the array
*/
static public boolean[] expand(boolean[] list, int newSize) {
boolean[] temp = new boolean[newSize];
System.arraycopy(list, 0, temp, 0, Math.min(newSize, list.length));
return temp;
}
static public byte[] expand(byte[] list) {
return expand(list, list.length > 0 ? list.length << 1 : 1);
}
static public byte[] expand(byte[] list, int newSize) {
byte[] temp = new byte[newSize];
System.arraycopy(list, 0, temp, 0, Math.min(newSize, list.length));
return temp;
}
static public char[] expand(char[] list) {
return expand(list, list.length > 0 ? list.length << 1 : 1);
}
static public char[] expand(char[] list, int newSize) {
char[] temp = new char[newSize];
System.arraycopy(list, 0, temp, 0, Math.min(newSize, list.length));
return temp;
}
static public int[] expand(int[] list) {
return expand(list, list.length > 0 ? list.length << 1 : 1);
}
static public int[] expand(int[] list, int newSize) {
int[] temp = new int[newSize];
System.arraycopy(list, 0, temp, 0, Math.min(newSize, list.length));
return temp;
}
static public long[] expand(long[] list) {
return expand(list, list.length > 0 ? list.length << 1 : 1);
}
static public long[] expand(long[] list, int newSize) {
long[] temp = new long[newSize];
System.arraycopy(list, 0, temp, 0, Math.min(newSize, list.length));
return temp;
}
static public float[] expand(float[] list) {
return expand(list, list.length > 0 ? list.length << 1 : 1);
}
static public float[] expand(float[] list, int newSize) {
float[] temp = new float[newSize];
System.arraycopy(list, 0, temp, 0, Math.min(newSize, list.length));
return temp;
}
static public double[] expand(double[] list) {
return expand(list, list.length > 0 ? list.length << 1 : 1);
}
static public double[] expand(double[] list, int newSize) {
double[] temp = new double[newSize];
System.arraycopy(list, 0, temp, 0, Math.min(newSize, list.length));
return temp;
}
static public String[] expand(String[] list) {
return expand(list, list.length > 0 ? list.length << 1 : 1);
}
static public String[] expand(String[] list, int newSize) {
String[] temp = new String[newSize];
// in case the new size is smaller than list.length
System.arraycopy(list, 0, temp, 0, Math.min(newSize, list.length));
return temp;
}
/**
* @nowebref
*/
static public Object expand(Object array) {
int len = Array.getLength(array);
return expand(array, len > 0 ? len << 1 : 1);
}
@SuppressWarnings("SuspiciousSystemArraycopy")
static public Object expand(Object list, int newSize) {
Class<?> type = list.getClass().getComponentType();
Object temp = Array.newInstance(type, newSize);
System.arraycopy(list, 0, temp, 0,
Math.min(Array.getLength(list), newSize));
return temp;
}
// contract() has been removed in revision 0124, use subset() instead.
// (expand() is also functionally equivalent)
/**
*
* Expands an array by one element and adds data to the new position. The
* datatype of the <b>element</b> parameter must be the same as the
* datatype of the array.
* <br/> <br/>
* When using an array of objects, the data returned from the function must
* be cast to the object array's data type. For example: <em>SomeClass[]
* items = (SomeClass[]) append(originalArray, element)</em>.
*
* @webref data:array functions
* @webBrief Expands an array by one element and adds data to the new position
* @param array array to append
* @param value new data for the array
* @see PApplet#shorten(boolean[])
* @see PApplet#expand(boolean[])
*/
static public byte[] append(byte[] array, byte value) {
array = expand(array, array.length + 1);
array[array.length-1] = value;
return array;
}
static public char[] append(char[] array, char value) {
array = expand(array, array.length + 1);
array[array.length-1] = value;
return array;
}
static public int[] append(int[] array, int value) {
array = expand(array, array.length + 1);
array[array.length-1] = value;
return array;
}
static public float[] append(float[] array, float value) {
array = expand(array, array.length + 1);
array[array.length-1] = value;
return array;
}
static public String[] append(String[] array, String value) {
array = expand(array, array.length + 1);
array[array.length-1] = value;
return array;
}
static public Object append(Object array, Object value) {
int length = Array.getLength(array);
array = expand(array, length + 1);
Array.set(array, length, value);
return array;
}
  /**
   * Decreases an array by one element and returns the shortened array.
   * <br/> <br/>
   * When using an array of objects, the data returned from the function must
   * be cast to the object array's data type. For example: <em>SomeClass[]
   * items = (SomeClass[]) shorten(originalArray)</em>.
   *
   * @webref data:array functions
   * @webBrief Decreases an array by one element and returns the shortened array
   * @param list array to shorten
   * @see PApplet#append(byte[], byte)
   * @see PApplet#expand(boolean[])
   */
  static public boolean[] shorten(boolean[] list) {
    // drop the final element by taking everything before it
    return subset(list, 0, list.length-1);
  }


  static public byte[] shorten(byte[] list) {
    return subset(list, 0, list.length-1);
  }


  static public char[] shorten(char[] list) {
    return subset(list, 0, list.length-1);
  }


  static public int[] shorten(int[] list) {
    return subset(list, 0, list.length-1);
  }


  static public float[] shorten(float[] list) {
    return subset(list, 0, list.length-1);
  }


  static public String[] shorten(String[] list) {
    return subset(list, 0, list.length-1);
  }


  static public Object shorten(Object list) {
    // reflective variant for arbitrary array types
    int length = Array.getLength(list);
    return subset(list, 0, length - 1);
  }
/**
*
* Inserts a value or an array of values into an existing array. The first two
* parameters must be arrays of the same datatype. The first parameter
* specifies the initial array to be modified, and the second parameter
* defines the data to be inserted. The third parameter is an index value
* which specifies the array position from which to insert data. (Remember
* that array index numbering starts at zero, so the first position is 0, the
* second position is 1, and so on.)<br />
* <br />
* When splicing an array of objects, the data returned from the function must
* be cast to the object array's data type. For example: <em>SomeClass[] items
* = (SomeClass[]) splice(array1, array2, index)</em>
*
* @webref data:array functions
* @webBrief Inserts a value or array of values into an existing array
* @param list
* array to splice into
* @param value
* value to be spliced in
* @param index
* position in the array from which to insert data
* @see PApplet#concat(boolean[], boolean[])
* @see PApplet#subset(boolean[], int, int)
*/
static final public boolean[] splice(boolean[] list,
boolean value, int index) {
boolean[] outgoing = new boolean[list.length + 1];
System.arraycopy(list, 0, outgoing, 0, index);
outgoing[index] = value;
System.arraycopy(list, index, outgoing, index + 1,
list.length - index);
return outgoing;
}
  /**
   * Overloads of splice() for each primitive/String array type. The
   * single-value forms insert one element at {@code index}; the
   * array-valued forms insert every element of {@code value} starting at
   * {@code index}. All return a freshly allocated array and leave
   * {@code list} unmodified. See the javadoc on the boolean[] single-value
   * overload above for details.
   */
  static final public boolean[] splice(boolean[] list,
                                       boolean[] value, int index) {
    boolean[] outgoing = new boolean[list.length + value.length];
    System.arraycopy(list, 0, outgoing, 0, index);
    System.arraycopy(value, 0, outgoing, index, value.length);
    System.arraycopy(list, index, outgoing, index + value.length,
                     list.length - index);
    return outgoing;
  }

  static final public byte[] splice(byte[] list,
                                    byte value, int index) {
    byte[] outgoing = new byte[list.length + 1];
    System.arraycopy(list, 0, outgoing, 0, index);
    outgoing[index] = value;
    System.arraycopy(list, index, outgoing, index + 1,
                     list.length - index);
    return outgoing;
  }

  static final public byte[] splice(byte[] list,
                                    byte[] value, int index) {
    byte[] outgoing = new byte[list.length + value.length];
    System.arraycopy(list, 0, outgoing, 0, index);
    System.arraycopy(value, 0, outgoing, index, value.length);
    System.arraycopy(list, index, outgoing, index + value.length,
                     list.length - index);
    return outgoing;
  }

  static final public char[] splice(char[] list,
                                    char value, int index) {
    char[] outgoing = new char[list.length + 1];
    System.arraycopy(list, 0, outgoing, 0, index);
    outgoing[index] = value;
    System.arraycopy(list, index, outgoing, index + 1,
                     list.length - index);
    return outgoing;
  }

  static final public char[] splice(char[] list,
                                    char[] value, int index) {
    char[] outgoing = new char[list.length + value.length];
    System.arraycopy(list, 0, outgoing, 0, index);
    System.arraycopy(value, 0, outgoing, index, value.length);
    System.arraycopy(list, index, outgoing, index + value.length,
                     list.length - index);
    return outgoing;
  }

  static final public int[] splice(int[] list,
                                   int value, int index) {
    int[] outgoing = new int[list.length + 1];
    System.arraycopy(list, 0, outgoing, 0, index);
    outgoing[index] = value;
    System.arraycopy(list, index, outgoing, index + 1,
                     list.length - index);
    return outgoing;
  }

  static final public int[] splice(int[] list,
                                   int[] value, int index) {
    int[] outgoing = new int[list.length + value.length];
    System.arraycopy(list, 0, outgoing, 0, index);
    System.arraycopy(value, 0, outgoing, index, value.length);
    System.arraycopy(list, index, outgoing, index + value.length,
                     list.length - index);
    return outgoing;
  }

  static final public float[] splice(float[] list,
                                     float value, int index) {
    float[] outgoing = new float[list.length + 1];
    System.arraycopy(list, 0, outgoing, 0, index);
    outgoing[index] = value;
    System.arraycopy(list, index, outgoing, index + 1,
                     list.length - index);
    return outgoing;
  }

  static final public float[] splice(float[] list,
                                     float[] value, int index) {
    float[] outgoing = new float[list.length + value.length];
    System.arraycopy(list, 0, outgoing, 0, index);
    System.arraycopy(value, 0, outgoing, index, value.length);
    System.arraycopy(list, index, outgoing, index + value.length,
                     list.length - index);
    return outgoing;
  }

  static final public String[] splice(String[] list,
                                      String value, int index) {
    String[] outgoing = new String[list.length + 1];
    System.arraycopy(list, 0, outgoing, 0, index);
    outgoing[index] = value;
    System.arraycopy(list, index, outgoing, index + 1,
                     list.length - index);
    return outgoing;
  }

  static final public String[] splice(String[] list,
                                      String[] value, int index) {
    String[] outgoing = new String[list.length + value.length];
    System.arraycopy(list, 0, outgoing, 0, index);
    System.arraycopy(value, 0, outgoing, index, value.length);
    System.arraycopy(list, index, outgoing, index + value.length,
                     list.length - index);
    return outgoing;
  }
@SuppressWarnings("SuspiciousSystemArraycopy")
static final public Object splice(Object list, Object value, int index) {
Class<?> type = list.getClass().getComponentType();
Object outgoing;
int length = Array.getLength(list);
// check whether item being spliced in is an array
if (value.getClass().getName().charAt(0) == '[') {
int vlength = Array.getLength(value);
outgoing = Array.newInstance(type, length + vlength);
System.arraycopy(list, 0, outgoing, 0, index);
System.arraycopy(value, 0, outgoing, index, vlength);
System.arraycopy(list, index, outgoing, index + vlength, length - index);
} else {
outgoing = Array.newInstance(type, length + 1);
System.arraycopy(list, 0, outgoing, 0, index);
Array.set(outgoing, index, value);
System.arraycopy(list, index, outgoing, index + 1, length - index);
}
return outgoing;
}
  /** Extracts everything from {@code start} to the end of the array. */
  static public boolean[] subset(boolean[] list, int start) {
    return subset(list, start, list.length - start);
  }

  /**
   *
   * Extracts an array of elements from an existing array. The <b>list</b>
   * parameter defines the array from which the elements will be copied, and the
   * <b>start</b> and <b>count</b> parameters specify which elements to extract.
   * If no <b>count</b> is given, elements will be extracted from the
   * <b>start</b> to the end of the array. When specifying the <b>start</b>,
   * remember that the first array element is 0. This function does not change
   * the source array.<br />
   * <br />
   * When using an array of objects, the data returned from the function must be
   * cast to the object array's data type. For example: <em>SomeClass[] items =
   * (SomeClass[]) subset(originalArray, 0, 4)</em>
   *
   * @webref data:array functions
   * @webBrief Extracts an array of elements from an existing array
   * @param list
   *          array to extract from
   * @param start
   *          position to begin
   * @param count
   *          number of values to extract
   * @see PApplet#splice(boolean[], boolean, int)
   */
  static public boolean[] subset(boolean[] list, int start, int count) {
    boolean[] output = new boolean[count];
    System.arraycopy(list, start, output, 0, count);
    return output;
  }

  static public byte[] subset(byte[] list, int start) {
    return subset(list, start, list.length - start);
  }

  static public byte[] subset(byte[] list, int start, int count) {
    byte[] output = new byte[count];
    System.arraycopy(list, start, output, 0, count);
    return output;
  }

  static public char[] subset(char[] list, int start) {
    return subset(list, start, list.length - start);
  }

  static public char[] subset(char[] list, int start, int count) {
    char[] output = new char[count];
    System.arraycopy(list, start, output, 0, count);
    return output;
  }

  static public int[] subset(int[] list, int start) {
    return subset(list, start, list.length - start);
  }

  static public int[] subset(int[] list, int start, int count) {
    int[] output = new int[count];
    System.arraycopy(list, start, output, 0, count);
    return output;
  }

  static public long[] subset(long[] list, int start) {
    return subset(list, start, list.length - start);
  }

  static public long[] subset(long[] list, int start, int count) {
    long[] output = new long[count];
    System.arraycopy(list, start, output, 0, count);
    return output;
  }

  static public float[] subset(float[] list, int start) {
    return subset(list, start, list.length - start);
  }

  static public float[] subset(float[] list, int start, int count) {
    float[] output = new float[count];
    System.arraycopy(list, start, output, 0, count);
    return output;
  }

  static public double[] subset(double[] list, int start) {
    return subset(list, start, list.length - start);
  }

  static public double[] subset(double[] list, int start, int count) {
    double[] output = new double[count];
    System.arraycopy(list, start, output, 0, count);
    return output;
  }

  static public String[] subset(String[] list, int start) {
    return subset(list, start, list.length - start);
  }

  static public String[] subset(String[] list, int start, int count) {
    String[] output = new String[count];
    System.arraycopy(list, start, output, 0, count);
    return output;
  }
static public Object subset(Object list, int start) {
int length = Array.getLength(list);
return subset(list, start, length - start);
}
@SuppressWarnings("SuspiciousSystemArraycopy")
static public Object subset(Object list, int start, int count) {
Class<?> type = list.getClass().getComponentType();
Object outgoing = Array.newInstance(type, count);
System.arraycopy(list, start, outgoing, 0, count);
return outgoing;
}
  /**
   *
   * Concatenates two arrays. For example, concatenating the array { 1, 2, 3 }
   * and the array { 4, 5, 6 } yields { 1, 2, 3, 4, 5, 6 }. Both parameters must
   * be arrays of the same datatype. Neither input array is modified.<br />
   * <br />
   * When using an array of objects, the data returned from the function must be
   * cast to the object array's data type. For example: <em>SomeClass[] items =
   * (SomeClass[]) concat(array1, array2)</em>.
   *
   * @webref data:array functions
   * @webBrief Concatenates two arrays
   * @param a
   *          first array to concatenate
   * @param b
   *          second array to concatenate
   * @see PApplet#splice(boolean[], boolean, int)
   * @see PApplet#arrayCopy(Object, int, Object, int, int)
   */
  static public boolean[] concat(boolean[] a, boolean[] b) {
    boolean[] c = new boolean[a.length + b.length];
    System.arraycopy(a, 0, c, 0, a.length);
    System.arraycopy(b, 0, c, a.length, b.length);
    return c;
  }

  static public byte[] concat(byte[] a, byte[] b) {
    byte[] c = new byte[a.length + b.length];
    System.arraycopy(a, 0, c, 0, a.length);
    System.arraycopy(b, 0, c, a.length, b.length);
    return c;
  }

  static public char[] concat(char[] a, char[] b) {
    char[] c = new char[a.length + b.length];
    System.arraycopy(a, 0, c, 0, a.length);
    System.arraycopy(b, 0, c, a.length, b.length);
    return c;
  }

  static public int[] concat(int[] a, int[] b) {
    int[] c = new int[a.length + b.length];
    System.arraycopy(a, 0, c, 0, a.length);
    System.arraycopy(b, 0, c, a.length, b.length);
    return c;
  }

  static public float[] concat(float[] a, float[] b) {
    float[] c = new float[a.length + b.length];
    System.arraycopy(a, 0, c, 0, a.length);
    System.arraycopy(b, 0, c, a.length, b.length);
    return c;
  }

  static public String[] concat(String[] a, String[] b) {
    String[] c = new String[a.length + b.length];
    System.arraycopy(a, 0, c, 0, a.length);
    System.arraycopy(b, 0, c, a.length, b.length);
    return c;
  }
@SuppressWarnings("SuspiciousSystemArraycopy")
static public Object concat(Object a, Object b) {
Class<?> type = a.getClass().getComponentType();
int alength = Array.getLength(a);
int blength = Array.getLength(b);
Object outgoing = Array.newInstance(type, alength + blength);
System.arraycopy(a, 0, outgoing, 0, alength);
System.arraycopy(b, 0, outgoing, alength, blength);
return outgoing;
}
//
/**
*
* Reverses the order of an array.
*
* @webref data:array functions
* @webBrief Reverses the order of an array
* @param list booleans[], bytes[], chars[], ints[], floats[], or Strings[]
* @see PApplet#sort(String[], int)
*/
static public boolean[] reverse(boolean[] list) {
boolean[] outgoing = new boolean[list.length];
int length1 = list.length - 1;
for (int i = 0; i < list.length; i++) {
outgoing[i] = list[length1 - i];
}
return outgoing;
}
static public byte[] reverse(byte[] list) {
byte[] outgoing = new byte[list.length];
int length1 = list.length - 1;
for (int i = 0; i < list.length; i++) {
outgoing[i] = list[length1 - i];
}
return outgoing;
}
static public char[] reverse(char[] list) {
char[] outgoing = new char[list.length];
int length1 = list.length - 1;
for (int i = 0; i < list.length; i++) {
outgoing[i] = list[length1 - i];
}
return outgoing;
}
static public int[] reverse(int[] list) {
int[] outgoing = new int[list.length];
int length1 = list.length - 1;
for (int i = 0; i < list.length; i++) {
outgoing[i] = list[length1 - i];
}
return outgoing;
}
static public float[] reverse(float[] list) {
float[] outgoing = new float[list.length];
int length1 = list.length - 1;
for (int i = 0; i < list.length; i++) {
outgoing[i] = list[length1 - i];
}
return outgoing;
}
static public String[] reverse(String[] list) {
String[] outgoing = new String[list.length];
int length1 = list.length - 1;
for (int i = 0; i < list.length; i++) {
outgoing[i] = list[length1 - i];
}
return outgoing;
}
static public Object reverse(Object list) {
Class<?> type = list.getClass().getComponentType();
int length = Array.getLength(list);
Object outgoing = Array.newInstance(type, length);
for (int i = 0; i < length; i++) {
Array.set(outgoing, i, Array.get(list, (length - 1) - i));
}
return outgoing;
}
//////////////////////////////////////////////////////////////
// STRINGS
/**
*
* Removes whitespace characters from the beginning and end of a String. In
* addition to standard whitespace characters such as space, carriage
* return, and tab, this function also removes the Unicode "nbsp" (U+00A0)
* character and the zero width no-break space (U+FEFF) character.
*
* @webref data:string_functions
* @webBrief Removes whitespace characters from the beginning and end of a <b>String</b>
* @param str any string
* @see PApplet#split(String, String)
* @see PApplet#join(String[], char)
*/
static public String trim(String str) {
if (str == null) {
return null;
}
// remove nbsp *and* zero width no-break space
return str.replace('\u00A0', ' ').replace('\uFEFF', ' ').trim();
}
/**
* @param array a String array
*/
static public String[] trim(String[] array) {
if (array == null) {
return null;
}
String[] outgoing = new String[array.length];
for (int i = 0; i < array.length; i++) {
if (array[i] != null) {
outgoing[i] = trim(array[i]);
}
}
return outgoing;
}
/**
*
* Combines an array of Strings into one String, each separated by the
* character(s) used for the <b>separator</b> parameter. To join arrays of
* ints or floats, it's necessary to first convert them to Strings using
* <b>nf()</b> or <b>nfs()</b>.
*
* @webref data:string_functions
* @webBrief Combines an array of <b>Strings</b> into one <b>String</b>, each separated by the
* character(s) used for the <b>separator</b> parameter
* @param list array of Strings
* @param separator char or String to be placed between each item
* @see PApplet#split(String, String)
* @see PApplet#trim(String)
* @see PApplet#nf(float, int, int)
* @see PApplet#nfs(float, int, int)
*/
static public String join(String[] list, char separator) {
return join(list, String.valueOf(separator));
}
static public String join(String[] list, String separator) {
StringBuilder sb = new StringBuilder();
for (int i = 0; i < list.length; i++) {
if (i != 0) sb.append(separator);
sb.append(list[i]);
}
return sb.toString();
}
  /**
   * Splits {@code value} at any character in the class-level WHITESPACE
   * delimiter set by delegating to {@link #splitTokens(String, String)}.
   */
  static public String[] splitTokens(String value) {
    return splitTokens(value, WHITESPACE);
  }
/**
*
* The <b>splitTokens()</b> function splits a <b>String</b> at one or many character
* delimiters or "tokens". The <b>delim</b> parameter specifies the character
* or characters to be used as a boundary.<br />
* <br />
* If no <b>delim</b> characters are specified, any whitespace character is
* used to split. Whitespace characters include tab (\t), line feed
* (\n), carriage return (\r), form feed (\f), and space.<br />
* <br />
* After using this function to parse incoming data, it is common to convert
* the data from Strings to integers or floats by using the datatype
* conversion functions <b>int()</b> and <b>float()</b>.
*
* @webref data:string_functions
* @webBrief The <b>splitTokens()</b> function splits a <b>String</b> at one or many
* character "tokens"
* @param value
* the String to be split
* @param delim
* list of individual characters that will be used as separators
* @see PApplet#split(String, String)
* @see PApplet#join(String[], String)
* @see PApplet#trim(String)
*/
static public String[] splitTokens(String value, String delim) {
StringTokenizer toker = new StringTokenizer(value, delim);
String[] pieces = new String[toker.countTokens()];
int index = 0;
while (toker.hasMoreTokens()) {
pieces[index++] = toker.nextToken();
}
return pieces;
}
/**
*
* The <b>split()</b> function breaks a String into pieces using a character
* or string as the delimiter. The <b>delim</b> parameter specifies the
* character or characters that mark the boundaries between each piece. A
* String[] array is returned that contains each of the pieces. <br />
* <br />
* If the result is a set of numbers, you can convert the String[] array to a
* float[] or int[] array using the datatype conversion functions <b>int()</b>
* and <b>float()</b>. (See the second example above.) <br />
* <br />
* The <b>splitTokens()</b> function works in a similar fashion, except that
* it splits using a range of characters instead of a specific character or
* sequence. <!-- <br />
* <br />
* This function uses regular expressions to determine how the <b>delim</b>
* parameter divides the <b>str</b> parameter. Therefore, if you use
* characters such parentheses and brackets that are used with regular
* expressions as a part of the <b>delim</b> parameter, you'll need to put two
* backslashes (\\\\) in front of the character (see example above). You can
* read more about
* <a href="http://en.wikipedia.org/wiki/Regular_expression">regular
* expressions</a> and
* <a href="http://en.wikipedia.org/wiki/Escape_character">escape
* characters</a> on Wikipedia. -->
*
* @webref data:string_functions
* @webBrief The <b>split()</b> function breaks a string into pieces using a
* character or string as the divider
* @usage web_application
* @param value
* the String to be split
* @param delim
* the character or String used to separate the data
*/
static public String[] split(String value, char delim) {
// do this so that the exception occurs inside the user's
// program, rather than appearing to be a bug inside split()
if (value == null) return null;
//return split(what, String.valueOf(delim)); // huh
char[] chars = value.toCharArray();
int splitCount = 0; //1;
for (char ch : chars) {
if (ch == delim) splitCount++;
}
// make sure that there is something in the input string
//if (chars.length > 0) {
// if the last char is a delimiter, get rid of it..
//if (chars[chars.length-1] == delim) splitCount--;
// on second thought, i don't agree with this, will disable
//}
if (splitCount == 0) {
String[] splits = new String[1];
splits[0] = value;
return splits;
}
//int pieceCount = splitCount + 1;
String[] splits = new String[splitCount + 1];
int splitIndex = 0;
int startIndex = 0;
for (int i = 0; i < chars.length; i++) {
if (chars[i] == delim) {
splits[splitIndex++] =
new String(chars, startIndex, i-startIndex);
startIndex = i + 1;
}
}
//if (startIndex != chars.length) {
splits[splitIndex] =
new String(chars, startIndex, chars.length-startIndex);
//}
return splits;
}
static public String[] split(String value, String delim) {
List<String> items = new ArrayList<>();
int index;
int offset = 0;
while ((index = value.indexOf(delim, offset)) != -1) {
items.add(value.substring(offset, index));
offset = index + delim.length();
}
items.add(value.substring(offset));
String[] outgoing = new String[items.size()];
items.toArray(outgoing);
return outgoing;
}
static protected LinkedHashMap<String, Pattern> matchPatterns;
static Pattern matchPattern(String regexp) {
Pattern p = null;
if (matchPatterns == null) {
matchPatterns = new LinkedHashMap<>(16, 0.75f, true) {
@Override
protected boolean removeEldestEntry(Map.Entry<String, Pattern> eldest) {
// Limit the number of match patterns at 10 most recently used
return size() == 10;
}
};
} else {
p = matchPatterns.get(regexp);
}
if (p == null) {
p = Pattern.compile(regexp, Pattern.MULTILINE | Pattern.DOTALL);
matchPatterns.put(regexp, p);
}
return p;
}
/**
*
* This function is used to apply a regular expression to a piece of text, and
* return matching groups (elements found inside parentheses) as a String
* array. If there are no matches, a <b>null</b> value will be returned. If no groups
* are specified in the regular expression, but the sequence matches, an array
* of length 1 (with the matched text as the first element of the array) will
* be returned.<br />
* <br />
* To use the function, first check to see if the result is <b>null</b>. If the
* result is null, then the sequence did not match at all. If the sequence did
* match, an array is returned.<br />
* <br />
* If there are groups (specified by sets of parentheses) in the regular
* expression, then the contents of each will be returned in the array.
* Element [0] of a regular expression match returns the entire matching
* string, and the match groups start at element [1] (the first group is [1],
* the second [2], and so on).<br />
* <br />
* The syntax can be found in the reference for Java's <a href=
* "https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html">Pattern</a>
* class. For regular expression syntax, read the
* <a href="https://docs.oracle.com/javase/tutorial/essential/regex/">Java
* Tutorial</a> on the topic.
*
* @webref data:string_functions
* @webBrief The function is used to apply a regular expression to a
* piece of text, and return matching groups (elements found inside
* parentheses) as a <b>String</b> array
* @param str
* the String to be searched
* @param regexp
* the regexp to be used for matching
* @see PApplet#matchAll(String, String)
* @see PApplet#split(String, String)
* @see PApplet#splitTokens(String, String)
* @see PApplet#join(String[], String)
* @see PApplet#trim(String)
*/
static public String[] match(String str, String regexp) {
Pattern p = matchPattern(regexp);
Matcher m = p.matcher(str);
if (m.find()) {
int count = m.groupCount() + 1;
String[] groups = new String[count];
for (int i = 0; i < count; i++) {
groups[i] = m.group(i);
}
return groups;
}
return null;
}
/**
*
* This function is used to apply a regular expression to a piece of text,
* and return a list of matching groups (elements found inside parentheses)
* as a two-dimensional String array. If there are no matches, a <b>null</b>
* value will be returned. If no groups are specified in the regular
* expression, but the sequence matches, a two-dimensional array is still
* returned, but the second dimension is only of length one.<br />
* <br />
* To use the function, first check to see if the result is <b>null</b>. If the
* result is null, then the sequence did not match at all. If the sequence did
* match, a 2D array is returned.<br />
* <br />
* If there are groups (specified by sets of parentheses) in the regular
* expression, then the contents of each will be returned in the array.
* Assuming a loop with counter variable i, element [i][0] of a regular
* expression match returns the entire matching string, and the match groups
* start at element [i][1] (the first group is [i][1], the second [i][2], and
* so on).<br />
* <br />
* The syntax can be found in the reference for Java's <a href=
* "https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html">Pattern</a>
* class. For regular expression syntax, read the
* <a href="https://docs.oracle.com/javase/tutorial/essential/regex/">Java
* Tutorial</a> on the topic.
*
* @webref data:string_functions
* @webBrief This function is used to apply a regular expression to a piece of
* text
* @param str
* the String to be searched
* @param regexp
* the regexp to be used for matching
* @see PApplet#match(String, String)
* @see PApplet#split(String, String)
* @see PApplet#splitTokens(String, String)
* @see PApplet#join(String[], String)
* @see PApplet#trim(String)
*/
static public String[][] matchAll(String str, String regexp) {
Pattern p = matchPattern(regexp);
Matcher m = p.matcher(str);
List<String[]> results = new ArrayList<>();
int count = m.groupCount() + 1;
while (m.find()) {
String[] groups = new String[count];
for (int i = 0; i < count; i++) {
groups[i] = m.group(i);
}
results.add(groups);
}
if (results.isEmpty()) {
return null;
}
String[][] matches = new String[results.size()][count];
for (int i = 0; i < matches.length; i++) {
matches[i] = results.get(i);
}
return matches;
}
//////////////////////////////////////////////////////////////
// CASTING FUNCTIONS, INSERTED BY PREPROC
/**
* <p>Convert an integer to a boolean. Because of how Java handles upgrading
* numbers, this will also cover byte and char (as they will upgrade to
* an int without any sort of explicit cast).</p>
* <p>The preprocessor will convert boolean(what) to parseBoolean(what).</p>
* @return false if 0, true if any other number
*/
static final public boolean parseBoolean(int what) {
return (what != 0);
}
/**
* Convert the string "true" or "false" to a boolean.
* @return true if 'what' is "true" or "TRUE", false otherwise
*/
static final public boolean parseBoolean(String what) {
return Boolean.parseBoolean(what);
}
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
/**
* Convert an int array to a boolean array. An int equal
* to zero will return false, and any other value will return true.
* @return array of boolean elements
*/
static final public boolean[] parseBoolean(int[] what) {
boolean[] outgoing = new boolean[what.length];
for (int i = 0; i < what.length; i++) {
outgoing[i] = (what[i] != 0);
}
return outgoing;
}
static final public boolean[] parseBoolean(String[] what) {
boolean[] outgoing = new boolean[what.length];
for (int i = 0; i < what.length; i++) {
outgoing[i] = Boolean.parseBoolean(what[i]);
}
return outgoing;
}
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
static final public byte parseByte(boolean what) {
return what ? (byte)1 : 0;
}
static final public byte parseByte(char what) {
return (byte) what;
}
static final public byte parseByte(int what) {
return (byte) what;
}
static final public byte parseByte(float what) {
return (byte) what;
}
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
static final public byte[] parseByte(boolean[] what) {
byte[] outgoing = new byte[what.length];
for (int i = 0; i < what.length; i++) {
outgoing[i] = what[i] ? (byte)1 : 0;
}
return outgoing;
}
static final public byte[] parseByte(char[] what) {
byte[] outgoing = new byte[what.length];
for (int i = 0; i < what.length; i++) {
outgoing[i] = (byte) what[i];
}
return outgoing;
}
static final public byte[] parseByte(int[] what) {
byte[] outgoing = new byte[what.length];
for (int i = 0; i < what.length; i++) {
outgoing[i] = (byte) what[i];
}
return outgoing;
}
static final public byte[] parseByte(float[] what) {
byte[] outgoing = new byte[what.length];
for (int i = 0; i < what.length; i++) {
outgoing[i] = (byte) what[i];
}
return outgoing;
}
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
static final public char parseChar(byte what) {
return (char) (what & 0xff);
}
static final public char parseChar(int what) {
return (char) what;
}
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
static final public char[] parseChar(byte[] what) {
char[] outgoing = new char[what.length];
for (int i = 0; i < what.length; i++) {
outgoing[i] = (char) (what[i] & 0xff);
}
return outgoing;
}
static final public char[] parseChar(int[] what) {
char[] outgoing = new char[what.length];
for (int i = 0; i < what.length; i++) {
outgoing[i] = (char) what[i];
}
return outgoing;
}
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
static final public int parseInt(boolean what) {
return what ? 1 : 0;
}
/**
* Note that parseInt() will un-sign a signed byte value.
*/
static final public int parseInt(byte what) {
return what & 0xff;
}
/**
* Note that parseInt('5') is unlike String in the sense that it
* won't return 5, but the ascii value. This is because ((int) someChar)
* returns the ascii value, and parseInt() is just longhand for the cast.
*/
static final public int parseInt(char what) {
return what;
}
/**
* Same as floor(), or an (int) cast.
*/
static final public int parseInt(float what) {
return (int) what;
}
/**
* Parse a String into an int value. Returns 0 if the value is bad.
*/
static final public int parseInt(String what) {
return parseInt(what, 0);
}
/**
* Parse a String to an int, and provide an alternate value that
* should be used when the number is invalid. If there's a decimal place,
* it will be truncated, making this more of a toInt() than parseInt()
* function. This is because the method is used internally for casting.
* Not ideal, but the name was chosen before that clarification was made.
*/
static final public int parseInt(String what, int otherwise) {
try {
int offset = what.indexOf('.');
if (offset == -1) {
return Integer.parseInt(what);
} else {
return Integer.parseInt(what.substring(0, offset));
}
} catch (NumberFormatException e) {
return otherwise;
}
}
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
static final public int[] parseInt(boolean[] what) {
int[] list = new int[what.length];
for (int i = 0; i < what.length; i++) {
list[i] = what[i] ? 1 : 0;
}
return list;
}
static final public int[] parseInt(byte[] what) { // note this un-signs
int[] list = new int[what.length];
for (int i = 0; i < what.length; i++) {
list[i] = (what[i] & 0xff);
}
return list;
}
static final public int[] parseInt(char[] what) {
int[] list = new int[what.length];
for (int i = 0; i < what.length; i++) {
list[i] = what[i];
}
return list;
}
static public int[] parseInt(float[] what) {
int[] inties = new int[what.length];
for (int i = 0; i < what.length; i++) {
inties[i] = (int)what[i];
}
return inties;
}
/**
* Make an array of int elements from an array of String objects.
* If the String can't be parsed as a number, it will be set to zero.
* <pre>
* String s[] = { "1", "300", "44" };
* int numbers[] = parseInt(s);
* // numbers will contain { 1, 300, 44 }
* </pre>
*/
static public int[] parseInt(String[] what) {
return parseInt(what, 0);
}
/**
* Make an array of int elements from an array of String objects.
* If the String can't be parsed as a number, its entry in the
* array will be set to the value of the "missing" parameter.
* <pre>
* String s[] = { "1", "300", "apple", "44" };
* int numbers[] = parseInt(s, 9999);
* // numbers will contain { 1, 300, 9999, 44 }
* </pre>
*/
static public int[] parseInt(String[] what, int missing) {
int[] output = new int[what.length];
for (int i = 0; i < what.length; i++) {
try {
output[i] = Integer.parseInt(what[i]);
} catch (NumberFormatException e) {
output[i] = missing;
}
}
return output;
}
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
/**
* Convert an int to a float value. Also handles bytes because of
* Java's rules for upgrading values.
*/
static final public float parseFloat(int what) { // also handles byte
return what;
}
static final public float parseFloat(String what) {
return parseFloat(what, Float.NaN);
}
static final public float parseFloat(String what, float otherwise) {
try {
return Float.parseFloat(what);
} catch (NumberFormatException ignored) { }
return otherwise;
}
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
/** Widen an array of bytes into an array of floats. */
static final public float[] parseFloat(byte[] what) {
  final float[] result = new float[what.length];
  for (int i = 0; i < result.length; i++) {
    result[i] = what[i];
  }
  return result;
}

/** Widen an array of ints into an array of floats. */
static final public float[] parseFloat(int[] what) {
  final float[] result = new float[what.length];
  for (int i = 0; i < result.length; i++) {
    result[i] = what[i];
  }
  return result;
}

/**
 * Parse an array of Strings into floats; unparseable entries become
 * Float.NaN.
 */
static final public float[] parseFloat(String[] what) {
  return parseFloat(what, Float.NaN);
}

/**
 * Parse an array of Strings into floats; unparseable entries are
 * replaced by the "missing" value.
 */
static final public float[] parseFloat(String[] what, float missing) {
  final float[] result = new float[what.length];
  for (int i = 0; i < result.length; i++) {
    float value;
    try {
      value = Float.parseFloat(what[i]);
    } catch (NumberFormatException e) {
      value = missing;
    }
    result[i] = value;
  }
  return result;
}
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
// Primitive-to-String conversion, one overload per primitive type.

static final public String str(boolean value) {
  return Boolean.toString(value);
}

static final public String str(byte value) {
  return Byte.toString(value);
}

static final public String str(char value) {
  return Character.toString(value);
}

static final public String str(int value) {
  return Integer.toString(value);
}

static final public String str(float value) {
  return Float.toString(value);
}


// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .

// Array variants: convert each element with the matching scalar str().

static final public String[] str(boolean[] values) {
  final String[] out = new String[values.length];
  for (int i = 0; i < out.length; i++) {
    out[i] = str(values[i]);
  }
  return out;
}

static final public String[] str(byte[] values) {
  final String[] out = new String[values.length];
  for (int i = 0; i < out.length; i++) {
    out[i] = str(values[i]);
  }
  return out;
}

static final public String[] str(char[] values) {
  final String[] out = new String[values.length];
  for (int i = 0; i < out.length; i++) {
    out[i] = str(values[i]);
  }
  return out;
}

static final public String[] str(int[] values) {
  final String[] out = new String[values.length];
  for (int i = 0; i < out.length; i++) {
    out[i] = str(values[i]);
  }
  return out;
}

static final public String[] str(float[] values) {
  final String[] out = new String[values.length];
  for (int i = 0; i < out.length; i++) {
    out[i] = str(values[i]);
  }
  return out;
}


//////////////////////////////////////////////////////////////

// INT NUMBER FORMATTING

/**
 * Format a float, dropping the decimal point when the value is a
 * whole number (so 3.0f prints as "3" rather than "3.0").
 */
static public String nf(float num) {
  final int truncated = (int) num;
  return (num == truncated) ? str(truncated) : str(num);
}

/** Array variant of nf(float). */
static public String[] nf(float[] nums) {
  final String[] out = new String[nums.length];
  for (int i = 0; i < out.length; i++) {
    out[i] = nf(nums[i]);
  }
  return out;
}
/**
 * Cached integer formatter shared by nf(int, int) and nfc(int). It is
 * rebuilt lazily whenever the requested digits/grouping configuration
 * differs from the cached one.
 * NOTE(review): this static cache is not synchronized — appears to assume
 * formatting only happens from a single thread; confirm before calling
 * from multiple threads.
 */
static private NumberFormat int_nf;
// minimum integer digits the cached formatter was configured with
static private int int_nf_digits;
// whether the cached formatter inserts grouping separators (commas)
static private boolean int_nf_commas;
/**
 * Utility function for formatting numbers into strings. There are two
 * versions: one for formatting floats and one for formatting ints. The
 * values for the <b>digits</b> and <b>right</b> parameters should always be
 * positive integers. The <b>left</b> parameter should be positive or 0; when
 * it is zero, only the right side is formatted.<br />
 * <br />
 * <b>nf()</b> pads zeros onto the left and/or right of a number, typically
 * to align a list of numbers. To <em>remove</em> digits from a
 * floating-point number, use <b>int()</b>, <b>ceil()</b>, <b>floor()</b>,
 * or <b>round()</b> instead.
 *
 * @webref data:string_functions
 * @webBrief Utility function for formatting numbers into strings
 * @param nums
 *          the numbers to format
 * @param digits
 *          number of digits to pad with zero
 * @see PApplet#nfs(float, int, int)
 * @see PApplet#nfp(float, int, int)
 * @see PApplet#nfc(float, int)
 * @see <a href=
 *      "https://processing.org/reference/intconvert_.html">int(float)</a>
 */
static public String[] nf(int[] nums, int digits) {
  final String[] out = new String[nums.length];
  for (int i = 0; i < out.length; i++) {
    out[i] = nf(nums[i], digits);
  }
  return out;
}

/**
 * @param num the number to format
 */
static public String nf(int num, int digits) {
  // Reuse the shared formatter if it already matches this configuration.
  final boolean cacheValid =
    (int_nf != null) && !int_nf_commas && (int_nf_digits == digits);
  if (!cacheValid) {
    int_nf = NumberFormat.getInstance();
    int_nf.setGroupingUsed(false);  // plain digits, no grouping separators
    int_nf_commas = false;
    int_nf.setMinimumIntegerDigits(digits);
    int_nf_digits = digits;
  }
  return int_nf.format(num);
}
/**
 * Utility function for formatting numbers into strings while inserting
 * separators to mark units of 1000. There are four versions: one each for
 * ints, int arrays, floats, and float arrays.<br />
 * <br />
 * The value for the <b>right</b> parameter should always be a positive
 * integer.<br />
 * <br />
 * For a non-US locale, this will insert periods instead of commas,
 * or whatever is appropriate for that region.
 *
 * @webref data:string_functions
 * @webBrief Utility function for formatting numbers into strings and placing
 *           appropriate commas to mark units of 1000
 * @param nums
 *          the numbers to format
 * @see PApplet#nf(float, int, int)
 * @see PApplet#nfp(float, int, int)
 * @see PApplet#nfs(float, int, int)
 */
static public String[] nfc(int[] nums) {
  final String[] out = new String[nums.length];
  for (int i = 0; i < out.length; i++) {
    out[i] = nfc(nums[i]);
  }
  return out;
}

/**
 * @param num the number to format
 */
static public String nfc(int num) {
  // Reuse the shared formatter if it is already in "grouped" mode.
  final boolean cacheValid =
    (int_nf != null) && int_nf_commas && (int_nf_digits == 0);
  if (!cacheValid) {
    int_nf = NumberFormat.getInstance();
    int_nf.setGroupingUsed(true);  // locale-appropriate grouping separators
    int_nf_commas = true;
    int_nf.setMinimumIntegerDigits(0);
    int_nf_digits = 0;
  }
  return int_nf.format(num);
}
/**
 * Utility function for formatting numbers into strings. Like <b>nf()</b>,
 * but positive numbers get a leading blank space so they line up with
 * negative numbers despite the minus sign. There are versions for both
 * floats and ints. The <b>digits</b>, <b>left</b>, and <b>right</b>
 * parameters should always be positive integers.
 *
 * @webref data:string_functions
 * @webBrief Utility function for formatting numbers into strings
 * @param num the number to format
 * @param digits number of digits to pad with zeroes
 * @see PApplet#nf(float, int, int)
 * @see PApplet#nfp(float, int, int)
 * @see PApplet#nfc(float, int)
 */
static public String nfs(int num, int digits) {
  final String padded = nf(num, digits);
  // Positive values get a leading space so columns align with the
  // minus sign of negative values.
  return (num < 0) ? padded : ' ' + padded;
}

/**
 * @param nums the numbers to format
 */
static public String[] nfs(int[] nums, int digits) {
  final String[] out = new String[nums.length];
  for (int i = 0; i < out.length; i++) {
    out[i] = nfs(nums[i], digits);
  }
  return out;
}
/**
 * Utility function for formatting numbers into strings. Like <b>nf()</b>,
 * but puts a "+" in front of positive numbers and a "-" in front of
 * negative numbers. There are two versions: one for formatting floats, and
 * one for formatting ints. The <b>digits</b>, <b>left</b>, and <b>right</b>
 * parameters should always be positive integers.
 *
 * @webref data:string_functions
 * @webBrief Utility function for formatting numbers into strings
 * @param num
 *          the number to format
 * @param digits
 *          number of digits to pad with zeroes
 * @see PApplet#nf(float, int, int)
 * @see PApplet#nfs(float, int, int)
 * @see PApplet#nfc(float, int)
 */
static public String nfp(int num, int digits) {
  final String padded = nf(num, digits);
  // Negative values already carry their '-'; prefix positives with '+'.
  return (num < 0) ? padded : '+' + padded;
}

/**
 * @param nums the numbers to format
 */
static public String[] nfp(int[] nums, int digits) {
  final String[] out = new String[nums.length];
  for (int i = 0; i < out.length; i++) {
    out[i] = nfp(nums[i], digits);
  }
  return out;
}
//////////////////////////////////////////////////////////////
// FLOAT NUMBER FORMATTING
/**
 * Cached float formatter shared by nf(float, int, int) and
 * nfc(float, int); rebuilt lazily whenever the requested
 * left/right/grouping configuration changes.
 * NOTE(review): static cache, not synchronized — appears to assume
 * single-threaded use; confirm before calling from multiple threads.
 */
static private NumberFormat float_nf;
// cached minimum integer digits (left) and fraction digits (right)
static private int float_nf_left, float_nf_right;
// whether the cached formatter inserts grouping separators
static private boolean float_nf_commas;
/**
 * @param left number of digits to the left of the decimal point
 * @param right number of digits to the right of the decimal point
 */
static public String[] nf(float[] nums, int left, int right) {
  final String[] out = new String[nums.length];
  for (int i = 0; i < out.length; i++) {
    out[i] = nf(nums[i], left, right);
  }
  return out;
}

/** Format a float with fixed digit counts on either side of the point. */
static public String nf(float num, int left, int right) {
  // Reuse the shared formatter if it already matches this configuration.
  final boolean cacheValid = (float_nf != null) && !float_nf_commas &&
    (float_nf_left == left) && (float_nf_right == right);
  if (!cacheValid) {
    float_nf = NumberFormat.getInstance();
    float_nf.setGroupingUsed(false);  // no grouping separators
    float_nf_commas = false;
    if (left != 0) float_nf.setMinimumIntegerDigits(left);
    if (right != 0) {
      // Pin the fraction width so the output has exactly 'right' decimals.
      float_nf.setMinimumFractionDigits(right);
      float_nf.setMaximumFractionDigits(right);
    }
    float_nf_left = left;
    float_nf_right = right;
  }
  return float_nf.format(num);
}
/**
 * @param right number of digits to the right of the decimal point
 */
static public String[] nfc(float[] nums, int right) {
  final String[] out = new String[nums.length];
  for (int i = 0; i < out.length; i++) {
    out[i] = nfc(nums[i], right);
  }
  return out;
}

/** Format a float with grouping separators and a fixed fraction width. */
static public String nfc(float num, int right) {
  // Reuse the shared formatter if it is already in "grouped" mode with
  // the same fraction width.
  final boolean cacheValid = (float_nf != null) && float_nf_commas &&
    (float_nf_left == 0) && (float_nf_right == right);
  if (!cacheValid) {
    float_nf = NumberFormat.getInstance();
    float_nf.setGroupingUsed(true);  // locale-appropriate separators
    float_nf_commas = true;
    if (right != 0) {
      float_nf.setMinimumFractionDigits(right);
      float_nf.setMaximumFractionDigits(right);
    }
    float_nf_left = 0;
    float_nf_right = right;
  }
  return float_nf.format(num);
}
/**
 * @param left the number of digits to the left of the decimal point
 * @param right the number of digits to the right of the decimal point
 */
static public String[] nfs(float[] nums, int left, int right) {
  final String[] out = new String[nums.length];
  for (int i = 0; i < out.length; i++) {
    out[i] = nfs(nums[i], left, right);
  }
  return out;
}

/** Like nf(float, int, int) but pads positives with a leading space. */
static public String nfs(float num, int left, int right) {
  final String padded = nf(num, left, right);
  return (num < 0) ? padded : ' ' + padded;
}
/**
 * @param left the number of digits to the left of the decimal point
 * @param right the number of digits to the right of the decimal point
 */
static public String[] nfp(float[] nums, int left, int right) {
  final String[] out = new String[nums.length];
  for (int i = 0; i < out.length; i++) {
    out[i] = nfp(nums[i], left, right);
  }
  return out;
}

/** Like nf(float, int, int) but prefixes positives with a '+'. */
static public String nfp(float num, int left, int right) {
  final String padded = nf(num, left, right);
  return (num < 0) ? padded : '+' + padded;
}
//////////////////////////////////////////////////////////////
// HEX/BINARY CONVERSION
/**
*
* Converts an <b>int</b>, <b>byte</b>, <b>char</b>, or <b>color</b> to a
* <b>String</b> containing the equivalent hexadecimal notation. For example,
* the <b>color</b> value produced by <b>color(0, 102, 153)</b> will convert
* to the <b>String</b> value <b>"FF006699"</b>. This function can help make
* your geeky debugging sessions much happier.<br />
* <br />
* Note that the maximum number of digits is 8, because an <b>int</b> value
* can only represent up to 32 bits. Specifying more than 8 digits will not
* increase the length of the <b>String</b> further.
*
* @webref data:conversion
* @webBrief Converts a <b>byte</b>, <b>char</b>, <b>int</b>, or <b>color</b> to a <b>String</b> containing the
* equivalent hexadecimal notation
* @param value
* the value to convert
* @see PApplet#unhex(String)
* @see PApplet#binary(byte)
* @see PApplet#unbinary(String)
*/
/** Two hex digits: one byte. */
static final public String hex(byte value) {
  return hex(value, 2);
}

/** Four hex digits: one (two-byte) char. */
static final public String hex(char value) {
  return hex(value, 4);
}

/** Eight hex digits: one full int (e.g. an ARGB color). */
static final public String hex(int value) {
  return hex(value, 8);
}

/**
 * @param digits the number of digits (maximum 8)
 */
static final public String hex(int value, int digits) {
  // An int holds 32 bits, so more than 8 hex digits is meaningless.
  final int width = Math.min(digits, 8);
  final String text = Integer.toHexString(value).toUpperCase();
  final int len = text.length();
  if (len > width) {
    return text.substring(len - width);  // keep the low-order digits
  }
  if (len < width) {
    return "00000000".substring(8 - (width - len)) + text;  // zero-pad
  }
  return text;
}
/**
*
* Converts a <b>String</b> representation of a hexadecimal number to its
* equivalent integer value.
*
* @webref data:conversion
* @webBrief Converts a <b>String</b> representation of a hexadecimal number to its
* equivalent integer value
* @param value
* String to convert to an integer
* @see PApplet#hex(int, int)
* @see PApplet#binary(byte)
* @see PApplet#unbinary(String)
*/
static final public int unhex(String value) {
  // Parse as a long so values that overflow a signed int (anything
  // above 2^31-1, e.g. "FF006699") still map onto the int's 32 bits.
  final long parsed = Long.parseLong(value, 16);
  return (int) parsed;
}
//
/**
 * Returns the binary representation of a byte as a String.
 * The result always has 8 digits.
 */
static final public String binary(byte value) {
  return binary(value, 8);
}

/**
 * Returns the binary representation of a char as a String.
 * The result always has 16 digits because chars are two bytes wide.
 */
static final public String binary(char value) {
  return binary(value, 16);
}

/**
 * Returns the binary representation of an int as a String of 32 digits.
 * Use binary(int, int) to request a specific number of digits.
 */
static final public String binary(int value) {
  return binary(value, 32);
}

/**
 * Converts an <b>int</b>, <b>byte</b>, <b>char</b>, or <b>color</b> to a
 * <b>String</b> containing the equivalent binary notation. For example, the
 * <b>color</b> value produced by <b>color(0, 102, 153, 255)</b> will convert
 * to the <b>String</b> value <b>"11111111000000000110011010011001"</b>. This
 * function can help make your geeky debugging sessions much happier.<br />
 * <br />
 * Note that the maximum number of digits is 32, because an <b>int</b> value
 * can only represent up to 32 bits. Specifying more than 32 digits will have
 * no effect.
 *
 * @webref data:conversion
 * @webBrief Converts an <b>int</b>, <b>byte</b>, <b>char</b>, or <b>color</b> to a
 *           <b>String</b> containing the equivalent binary notation
 * @param value
 *          value to convert
 * @param digits
 *          number of digits to return
 * @see PApplet#unbinary(String)
 * @see PApplet#hex(int,int)
 * @see PApplet#unhex(String)
 */
static final public String binary(int value, int digits) {
  // An int only has 32 bits, so cap the requested width there.
  final int width = Math.min(digits, 32);
  final String text = Integer.toBinaryString(value);
  final int len = text.length();
  if (len > width) {
    return text.substring(len - width);  // keep the low-order bits
  }
  if (len < width) {
    // Zero-pad on the left up to the requested width.
    final int offset = 32 - (width - len);
    return "00000000000000000000000000000000".substring(offset) + text;
  }
  return text;
}
/**
*
* Converts a <b>String</b> representation of a binary number to its equivalent
* integer value. For example, <b>unbinary("00001000")</b> will return
* <b>8</b>.
*
* @webref data:conversion
* @webBrief Converts a <b>String</b> representation of a binary number to its
* equivalent <b>integer</b> value
* @param value
* String to convert to an integer
* @see PApplet#binary(byte)
* @see PApplet#hex(int,int)
* @see PApplet#unhex(String)
*/
static final public int unbinary(String value) {
  final int radix = 2;  // binary
  return Integer.parseInt(value, radix);
}
//////////////////////////////////////////////////////////////
// COLOR FUNCTIONS
// moved here so that they can work without
// the graphics actually being instantiated (outside setup)
/**
*
* Creates colors for storing in variables of the <b>color</b> datatype. The
* parameters are interpreted as RGB or HSB values depending on the current
* <b>colorMode()</b>. The default mode is RGB values from 0 to 255 and,
* therefore, <b>color(255, 204, 0)</b> will return a bright yellow color (see
* the first example above).<br />
* <br />
* Note that if only one value is provided to <b>color()</b>, it will be
* interpreted as a grayscale value. Add a second value, and it will be used
* for alpha transparency. When three values are specified, they are
* interpreted as either RGB or HSB values. Adding a fourth value applies
* alpha transparency.<br />
* <br />
* Note that when using hexadecimal notation, it is not necessary to use
* <b>color()</b>, as in: <b>color c = #006699</b><br />
* <br />
* More about how colors are stored can be found in the reference for the
* <a href="color_datatype.html">color</a> datatype.
*
* @webref color:creating & reading
* @webBrief Creates colors for storing in variables of the <b>color</b>
* datatype
* @param gray
* number specifying value between white and black
* @see PApplet#colorMode(int)
*/
/** Clamp an int channel value into the 0..255 range. */
private static int clamp255(int v) {
  if (v > 255) return 255;
  if (v < 0) return 0;
  return v;
}

/**
 * Clamp a float channel value into the 0..255 range. NaN passes through
 * unchanged and becomes 0 when cast to int, matching the behavior of the
 * original inline if/else-if checks.
 */
private static float clamp255f(float v) {
  if (v > 255) return 255;
  if (v < 0) return 0;
  return v;
}

public final int color(int gray) {
  if (g != null) {
    return g.color(gray);
  }
  // No renderer yet: build an opaque gray ARGB value directly.
  final int c = clamp255(gray);
  return 0xff000000 | (c << 16) | (c << 8) | c;
}

/**
 * @nowebref
 * @param fgray number specifying value between white and black
 */
public final int color(float fgray) {
  if (g != null) {
    return g.color(fgray);
  }
  final int c = clamp255((int) fgray);
  return 0xff000000 | (c << 16) | (c << 8) | c;
}

/**
 * As of 0116 this also takes color(#FF8800, alpha)
 * @param alpha relative to current color range
 */
public final int color(int gray, int alpha) {
  if (g != null) {
    return g.color(gray, alpha);
  }
  final int a = clamp255(alpha);
  if (gray > 255) {
    // Values above 255 are assumed to be a packed #RRGGBB web color.
    return (a << 24) | (gray & 0xFFFFFF);
  }
  // Note: gray is deliberately not clamped below zero here, matching
  // the historical behavior (the clamp was commented out upstream).
  return (a << 24) | (gray << 16) | (gray << 8) | gray;
}

/**
 * @nowebref
 */
public final int color(float fgray, float falpha) {
  if (g != null) {
    return g.color(fgray, falpha);
  }
  final int c = clamp255((int) fgray);
  final int a = clamp255((int) falpha);
  return (a << 24) | (c << 16) | (c << 8) | c;
}

/**
 * @param v1 red or hue values relative to the current color range
 * @param v2 green or saturation values relative to the current color range
 * @param v3 blue or brightness values relative to the current color range
 */
public final int color(int v1, int v2, int v3) {
  if (g != null) {
    return g.color(v1, v2, v3);
  }
  return 0xff000000 | (clamp255(v1) << 16) | (clamp255(v2) << 8) | clamp255(v3);
}

public final int color(int v1, int v2, int v3, int alpha) {
  if (g != null) {
    return g.color(v1, v2, v3, alpha);
  }
  return (clamp255(alpha) << 24) |
    (clamp255(v1) << 16) | (clamp255(v2) << 8) | clamp255(v3);
}

public final int color(float v1, float v2, float v3) {
  if (g != null) {
    return g.color(v1, v2, v3);
  }
  return 0xff000000 |
    ((int) clamp255f(v1) << 16) | ((int) clamp255f(v2) << 8) | (int) clamp255f(v3);
}

public final int color(float v1, float v2, float v3, float alpha) {
  if (g != null) {
    return g.color(v1, v2, v3, alpha);
  }
  return ((int) clamp255f(alpha) << 24) |
    ((int) clamp255f(v1) << 16) | ((int) clamp255f(v2) << 8) | (int) clamp255f(v3);
}
/**
*
* Calculates a new <b>color</b> that is a blend of two other colors. The <b>amt</b> parameter
* controls the amount of each color to use where an amount of 0.0 will produce
* the first color, 1.0 will return the second color, and 0.5 is halfway in
* between. Values between 0.0 and 1.0 will interpolate between the two colors in
* that proportion. <br />
* An amount below 0 will be treated as 0. Likewise, amounts above 1 will be
* capped at 1. This is different from the behavior of <b>lerp()</b>, but necessary
* because otherwise numbers outside the range will produce strange and
* unexpected colors.
*
* @webref color:creating & reading
* @webBrief Calculates a <b>color</b> or <b>colors</b> between two <b>colors</b> at a specific
* increment
* @usage web_application
* @param c1
* interpolate from this color
* @param c2
* interpolate to this color
* @param amt
* between 0.0 and 1.0
* @see PImage#blendColor(int, int, int)
* @see PGraphics#color(float, float, float, float)
* @see PApplet#lerp(float, float, float)
*/
public int lerpColor(int c1, int c2, float amt) {
  if (g == null) {
    // No renderer yet (lerpColor called before setup()): fall back to
    // the default RGB color mode.
    return PGraphics.lerpColor(c1, c2, amt, RGB);
  }
  return g.lerpColor(c1, c2, amt);
}
/**
 * Blends two colors together using the given blend mode. Static
 * delegation to PImage.blendColor() so it can be used without a
 * renderer being instantiated.
 */
static public int blendColor(int c1, int c2, int mode) {
  return PImage.blendColor(c1, c2, mode);
}
//////////////////////////////////////////////////////////////
/*
public void frameMoved(int x, int y) {
if (!fullScreen) {
System.err.println(EXTERNAL_MOVE + " " + x + " " + y);
System.err.flush(); // doesn't seem to help or hurt
}
}
public void frameResized(int w, int h) {
}
*/
//////////////////////////////////////////////////////////////
// WINDOW METHODS
// Queue of pending window events ("xy" = move, "wh" = resize) posted by
// the Surface and drained in dequeueWindowEvents().
Map<String, WindowEventValuePairs> windowEventQueue = new ConcurrentHashMap<>();
/** Sets the title of the sketch window (delegates to the surface). */
public void windowTitle(String title) {
  surface.setTitle(title);
}
/** Resizes the sketch window to the given dimensions (delegates to the surface). */
public void windowResize(int newWidth, int newHeight) {
  surface.setSize(newWidth, newHeight);
}
/**
 * Internal use only: called by Surface objects to queue a resize
 * event to call windowResized() when it's safe, which is after
 * the beginDraw() call and before the draw(). Note that this is
 * only the notification that the resize has happened.
 */
public void postWindowResized(int newWidth, int newHeight) {
  windowEventQueue.put("wh", new WindowEventValuePairs(newWidth, newHeight));
}
/** Called when window is resized. Empty by default; sketches override it. */
public void windowResized() { }
/** Enables or disables interactive resizing of the sketch window. */
public void windowResizable(boolean resizable) {
  surface.setResizable(resizable);
}
/** Moves the sketch window to the given screen location (delegates to the surface). */
public void windowMove(int x, int y) {
  surface.setLocation(x, y);
}
/**
 * When running from the PDE, this saves the window position for
 * next time the sketch is run. Needs to remain a separate method
 * so that it can be overridden by Python Mode.
 */
public void frameMoved(int newX, int newY) {
  // presumably EXTERNAL_MOVE is the marker the PDE parses from stderr —
  // confirm against the PDE's process-monitoring code.
  System.err.println(EXTERNAL_MOVE + " " + newX + " " + newY);
  System.err.flush(); // doesn't seem to help or hurt
}
/**
 * Internal use only: called by Surface objects to queue a position
 * event to call windowMoved() when it's safe, which is after
 * the beginDraw() call and before the draw(). Note that this is
 * only the notification that the window is in a new position.
 */
public void postWindowMoved(int newX, int newY) {
  // Only report back to the PDE when launched externally and not fullscreen.
  if (external && !fullScreen) {
    frameMoved(newX, newY);
  }
  windowEventQueue.put("xy", new WindowEventValuePairs(newX, newY));
}
/** Called when the window is moved. Empty by default; sketches override it. */
public void windowMoved() { }
/**
 * Drains any pending window events queued by the Surface ("xy" = move,
 * "wh" = resize) and fires the corresponding user callbacks.
 */
private void dequeueWindowEvents() {
  // remove() is atomic on ConcurrentHashMap, avoiding the previous
  // containsKey()/remove() check-then-act window.
  WindowEventValuePairs xy = windowEventQueue.remove("xy");
  if (xy != null) {
    windowX = xy.num1;
    windowY = xy.num2;
    windowMoved();
  }
  // The queued size payload is unused: per postWindowResized(), the event
  // is only the notification that a resize has happened.
  if (windowEventQueue.remove("wh") != null) {
    windowResized();
  }
}
protected
|
and
|
java
|
elastic__elasticsearch
|
x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/analyze/RandomBlobContent.java
|
{
"start": 669,
"end": 3430
}
|
class ____ {
static final int BUFFER_SIZE = 8191; // nearly 8kB, but prime, so unlikely to be aligned with any other block sizes
final byte[] buffer = new byte[BUFFER_SIZE];
private final BooleanSupplier isCancelledSupplier;
private final AtomicReference<Runnable> onLastRead;
private final String repositoryName;
/**
* @param repositoryName The name of the repository being tested, for use in exception messages.
* @param seed RNG seed to use for its contents.
* @param isCancelledSupplier Predicate that causes reads to throw a {@link RepositoryVerificationException}, allowing for fast failure
* on cancellation.
* @param onLastRead Runs when a {@code read()} call returns the last byte of the file, or on {@code close()} if the file was not fully
* read. Only runs once even if the last byte is read multiple times using {@code mark()} and {@code reset()}.
*/
RandomBlobContent(String repositoryName, long seed, BooleanSupplier isCancelledSupplier, Runnable onLastRead) {
this.repositoryName = repositoryName;
this.isCancelledSupplier = isCancelledSupplier;
this.onLastRead = new AtomicReference<>(onLastRead);
new Random(seed).nextBytes(buffer);
}
long getChecksum(long checksumRangeStart, long checksumRangeEnd) {
assert 0 <= checksumRangeStart && checksumRangeStart <= checksumRangeEnd;
final CRC32 crc32 = new CRC32();
final long startBlock = checksumRangeStart / buffer.length;
final long endBlock = (checksumRangeEnd - 1) / buffer.length;
if (startBlock == endBlock) {
crc32.update(
buffer,
Math.toIntExact(checksumRangeStart % buffer.length),
Math.toIntExact(checksumRangeEnd - checksumRangeStart)
);
} else {
final int bufferStart = Math.toIntExact(checksumRangeStart % buffer.length);
crc32.update(buffer, bufferStart, buffer.length - bufferStart);
for (long block = startBlock + 1; block < endBlock; block++) {
crc32.update(buffer);
}
crc32.update(buffer, 0, Math.toIntExact((checksumRangeEnd - 1) % buffer.length) + 1);
}
return crc32.getValue();
}
void ensureNotCancelled(final String position) {
if (isCancelledSupplier.getAsBoolean()) {
throw new RepositoryVerificationException(repositoryName, "blob upload cancelled at position [" + position + "]");
}
}
void onLastRead() {
final Runnable runnable = onLastRead.getAndSet(null);
if (runnable != null) {
runnable.run();
}
}
}
|
RandomBlobContent
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/streaming/runtime/io/InputGateUtil.java
|
{
"start": 1387,
"end": 1930
}
|
class ____ {
public static InputGate createInputGate(List<IndexedInputGate> inputGates) {
if (inputGates.size() <= 0) {
throw new RuntimeException("No such input gate.");
}
if (inputGates.size() == 1) {
return inputGates.get(0);
} else {
return new UnionInputGate(inputGates.toArray(new IndexedInputGate[0]));
}
}
/** Private constructor to prevent instantiation. */
private InputGateUtil() {
throw new RuntimeException();
}
}
|
InputGateUtil
|
java
|
grpc__grpc-java
|
core/src/main/java/io/grpc/internal/ChannelTracer.java
|
{
"start": 1424,
"end": 5074
}
|
class ____ than on this internal class.
static final Logger logger = Logger.getLogger(ChannelLogger.class.getName());
private final Object lock = new Object();
private final InternalLogId logId;
@GuardedBy("lock")
@Nullable
private final Collection<Event> events;
private final long channelCreationTimeNanos;
@GuardedBy("lock")
private int eventsLogged;
/**
* Creates a channel tracer and log the creation event of the underlying channel.
*
* @param logId logId will be prepended to the logs logged to Java logger
* @param maxEvents maximum number of events that are retained in memory. If not a positive
* number no events will be retained, but they will still be sent to the Java logger.
* @param channelCreationTimeNanos the creation time of the entity being traced
* @param description a description of the entity being traced
*/
ChannelTracer(
InternalLogId logId, final int maxEvents, long channelCreationTimeNanos, String description) {
checkNotNull(description, "description");
this.logId = checkNotNull(logId, "logId");
if (maxEvents > 0) {
events = new ArrayDeque<Event>() {
@GuardedBy("lock")
@Override
public boolean add(Event event) {
if (size() == maxEvents) {
removeFirst();
}
eventsLogged++;
return super.add(event);
}
};
} else {
events = null;
}
this.channelCreationTimeNanos = channelCreationTimeNanos;
reportEvent(new ChannelTrace.Event.Builder()
.setDescription(description + " created")
.setSeverity(ChannelTrace.Event.Severity.CT_INFO)
// passing the timestamp in as a parameter instead of computing it right here because when
// parent channel and subchannel both report the same event of the subchannel (e.g. creation
// event of the subchannel) we want the timestamps to be exactly the same.
.setTimestampNanos(channelCreationTimeNanos)
.build());
}
void reportEvent(Event event) {
Level logLevel;
switch (event.severity) {
case CT_ERROR:
logLevel = Level.FINE;
break;
case CT_WARNING:
logLevel = Level.FINER;
break;
default:
logLevel = Level.FINEST;
}
traceOnly(event);
logOnly(logId, logLevel, event.description);
}
boolean isTraceEnabled() {
synchronized (lock) {
return events != null;
}
}
void traceOnly(Event event) {
synchronized (lock) {
if (events != null) {
events.add(event);
}
}
}
static void logOnly(InternalLogId logId, Level logLevel, String msg) {
if (logger.isLoggable(logLevel)) {
LogRecord lr = new LogRecord(logLevel, "[" + logId + "] " + msg);
// No resource bundle as gRPC is not localized.
lr.setLoggerName(logger.getName());
lr.setSourceClassName(logger.getName());
// Both logger methods are called log in ChannelLogger.
lr.setSourceMethodName("log");
logger.log(lr);
}
}
InternalLogId getLogId() {
return logId;
}
void updateBuilder(ChannelStats.Builder builder) {
List<Event> eventsSnapshot;
int eventsLoggedSnapshot;
synchronized (lock) {
if (events == null) {
return;
}
eventsLoggedSnapshot = eventsLogged;
eventsSnapshot = new ArrayList<>(events);
}
builder.setChannelTrace(new ChannelTrace.Builder()
.setNumEventsLogged(eventsLoggedSnapshot)
.setCreationTimeNanos(channelCreationTimeNanos)
.setEvents(eventsSnapshot)
.build());
}
}
|
rather
|
java
|
micronaut-projects__micronaut-core
|
aop/src/main/java/io/micronaut/aop/InterceptorRegistry.java
|
{
"start": 896,
"end": 1023
}
|
interface ____ looking up interceptors from the bean context.
*
* @author graemerocher
* @since 3.0.0
*/
@NullMarked
public
|
for
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/converter/hbm/Money.java
|
{
"start": 217,
"end": 478
}
|
class ____ {
private long cents;
public Money(long cents) {
this.cents = cents;
}
public long getCents() {
return cents;
}
public void setCents(long cents) {
this.cents = cents;
}
}
//end::basic-hbm-attribute-converter-mapping-money-example[]
|
Money
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/engine/jdbc/internal/MutationStatementPreparerImpl.java
|
{
"start": 614,
"end": 2582
}
|
class ____ implements MutationStatementPreparer {
private final JdbcCoordinatorImpl jdbcCoordinator;
private final JdbcServices jdbcServices;
public MutationStatementPreparerImpl(JdbcCoordinatorImpl jdbcCoordinator, JdbcServices jdbcServices) {
this.jdbcCoordinator = jdbcCoordinator;
this.jdbcServices = jdbcServices;
}
@Override
public PreparedStatement prepareStatement(String sql, boolean isCallable) {
return buildPreparedStatementPreparationTemplate( sql, isCallable ).prepareStatement();
}
private StatementPreparationTemplate buildPreparedStatementPreparationTemplate(String sql, final boolean isCallable) {
return new StatementPreparationTemplate( sql ) {
@Override
protected PreparedStatement doPrepare() throws SQLException {
//noinspection resource
return isCallable
? connection().prepareCall( sql )
: connection().prepareStatement( sql );
}
};
}
@Override
public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) {
if ( autoGeneratedKeys == PreparedStatement.RETURN_GENERATED_KEYS ) {
checkAutoGeneratedKeysSupportEnabled();
}
return new StatementPreparationTemplate( sql ) {
public PreparedStatement doPrepare() throws SQLException {
//noinspection resource
return connection().prepareStatement( sql, autoGeneratedKeys );
}
}.prepareStatement();
}
private void checkAutoGeneratedKeysSupportEnabled() {
if ( ! settings().isGetGeneratedKeysEnabled() ) {
throw new AssertionFailure( "getGeneratedKeys() support is not enabled" );
}
}
@Override
public PreparedStatement prepareStatement(String sql, String[] columnNames) {
checkAutoGeneratedKeysSupportEnabled();
return new StatementPreparationTemplate( sql ) {
public PreparedStatement doPrepare() throws SQLException {
//noinspection resource
return connection().prepareStatement( sql, columnNames );
}
}.prepareStatement();
}
private abstract
|
MutationStatementPreparerImpl
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/ComponentDependenciesTest.java
|
{
"start": 4596,
"end": 4860
}
|
interface ____ {",
" String getString();",
"}");
Source interfaceTwo =
CompilerTests.javaSource(
"test.Two",
"package test;",
"import javax.annotation.Nullable;",
"",
"
|
One
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/compositefk/OneToManyEmbeddedIdFKTest.java
|
{
"start": 1083,
"end": 3649
}
|
class ____ {
@BeforeEach
public void setUp(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
PK superUserKey = new PK( 1, "Fab" );
SystemUser superUser = new SystemUser( superUserKey, "Fab" );
PK userKey = new PK( 2, "Andrea" );
SystemUser user = new SystemUser( userKey, "Andrea" );
System system = new System( 1, "sub1" );
system.addUser( superUser );
system.addUser( user );
session.persist( superUser );
session.persist( user );
session.persist( system );
}
);
}
@AfterEach
public void tearDown(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
@Test
public void testGet(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
System system = session.get( System.class, 1 );
assertThat( system, is( notNullValue() ) );
}
);
}
@Test
public void testHqlQuery(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
System system = (System) session.createQuery( "from System e where e.id = :id" )
.setParameter( "id", 1 ).uniqueResult();
assertThat( system, is( notNullValue() ) );
assertThat( system.getUsers().size(), is( 2 ) );
}
);
}
@Test
public void testHqlJoin(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
System system = session.createQuery( "from System e join e.users where e.id = :id", System.class )
.setParameter( "id", 1 ).uniqueResult();
assertThat( system, is( notNullValue() ) );
assertThat( system.getUsers().size(), is( 2 ) );
}
);
}
@Test
public void testHqlJoinFetch(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
System system = session.createQuery(
"from System e join fetch e.users where e.id = :id",
System.class
)
.setParameter( "id", 1 ).uniqueResult();
assertThat( system, is( notNullValue() ) );
assertThat( system.getUsers().size(), is( 2 ) );
}
);
}
@Test
public void testEmbeddedIdParameter(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
PK superUserKey = new PK( 1, "Fab" );
System system = session.createQuery(
"from System e join fetch e.users u where u.id = :id",
System.class
).setParameter( "id", superUserKey ).uniqueResult();
assertThat( system, is( notNullValue() ) );
assertThat( system.getUsers().size(), is( 1 ) );
}
);
}
@Entity(name = "System")
@Table( name = "systems" )
public static
|
OneToManyEmbeddedIdFKTest
|
java
|
apache__camel
|
components/camel-disruptor/src/test/java/org/apache/camel/component/disruptor/vm/DisruptorVmWaitForTaskCompleteTest.java
|
{
"start": 1167,
"end": 2861
}
|
class ____ extends AbstractVmTestSupport {
@Test
void testInOut() throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived("Bye World");
String out = template2.requestBody("direct:start", "Hello World", String.class);
assertEquals("Bye World", out);
MockEndpoint.assertIsSatisfied(context);
}
@Test
void testInOnly() throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived("Bye World");
// we send an in only but we use Always to wait for it to complete
// and since the route changes the payload we can get the response anyway
Exchange out = template2.send("direct:start", new Processor() {
public void process(Exchange exchange) {
exchange.getIn().setBody("Hello World");
exchange.setPattern(ExchangePattern.InOnly);
}
});
assertEquals("Bye World", out.getIn().getBody());
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("disruptor-vm:foo?waitForTaskToComplete=Always").transform(constant("Bye World"))
.to("mock:result");
}
};
}
@Override
protected RouteBuilder createRouteBuilderForSecondContext() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").to("disruptor-vm:foo?waitForTaskToComplete=Always");
}
};
}
}
|
DisruptorVmWaitForTaskCompleteTest
|
java
|
resilience4j__resilience4j
|
resilience4j-spring/src/test/java/io/github/resilience4j/ratelimiter/configure/RateLimiterInitializationInAspectTest.java
|
{
"start": 1132,
"end": 2310
}
|
class ____ {
@Bean
public RateLimiterRegistry rateLimiterRegistry() {
RateLimiterConfig backendRateLimiterConfig = RateLimiterConfig.custom()
.limitForPeriod(1)
.limitRefreshPeriod(Duration.ofSeconds(10))
.timeoutDuration(Duration.ofMillis(1))
.build();
return RateLimiterRegistry.custom()
.withRateLimiterConfig(RateLimiterConfig.ofDefaults())
.addRateLimiterConfig(BACKEND, backendRateLimiterConfig)
.build();
}
}
@Autowired
@Qualifier("rateLimiterDummyService")
TestDummyService testDummyService;
@Autowired
RateLimiterRegistry registry;
@Before
public void setUp() {
// ensure no rate limiters are initialized
assertThat(registry.getAllRateLimiters()).isEmpty();
}
@Test
public void testCorrectConfigIsUsedInAspect() {
// one successful call within 10s
assertThat(testDummyService.syncSuccess()).isEqualTo("ok");
assertThat(testDummyService.syncSuccess()).isEqualTo("recovered");
}
}
|
TestConfig
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/common/util/BytesRefHash.java
|
{
"start": 9165,
"end": 9357
}
|
class ____ {
private final BytesRef intermediate = new BytesRef();
public long find(BytesRef key) {
return threadSafeFind(key, intermediate);
}
}
}
|
Finder
|
java
|
elastic__elasticsearch
|
build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/text/ReplaceIsFalse.java
|
{
"start": 683,
"end": 1101
}
|
class ____ extends ReplaceTextual {
public ReplaceIsFalse(String valueToBeReplaced, SerializableJsonNode<TextNode> replacementNode) {
super("is_false", valueToBeReplaced, replacementNode);
}
public ReplaceIsFalse(String valueToBeReplaced, SerializableJsonNode<TextNode> replacementNode, String testName) {
super("is_false", valueToBeReplaced, replacementNode, testName);
}
}
|
ReplaceIsFalse
|
java
|
elastic__elasticsearch
|
modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/StoreBraceNode.java
|
{
"start": 616,
"end": 1123
}
|
class ____ extends UnaryNode {
/* ---- begin visitor ---- */
@Override
public <Scope> void visit(IRTreeVisitor<Scope> irTreeVisitor, Scope scope) {
irTreeVisitor.visitStoreBrace(this, scope);
}
@Override
public <Scope> void visitChildren(IRTreeVisitor<Scope> irTreeVisitor, Scope scope) {
getChildNode().visit(irTreeVisitor, scope);
}
/* ---- end visitor ---- */
public StoreBraceNode(Location location) {
super(location);
}
}
|
StoreBraceNode
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/core/type/classreading/MergedAnnotationReadingVisitor.java
|
{
"start": 4709,
"end": 6060
}
|
class ____ extends AnnotationVisitor {
private final List<Object> elements = new ArrayList<>();
private final Consumer<Object[]> consumer;
ArrayVisitor(Consumer<Object[]> consumer) {
super(SpringAsmInfo.ASM_VERSION);
this.consumer = consumer;
}
@Override
public void visit(String name, Object value) {
if (value instanceof Type type) {
value = type.getClassName();
}
this.elements.add(value);
}
@Override
public void visitEnum(String name, String descriptor, String value) {
MergedAnnotationReadingVisitor.this.visitEnum(descriptor, value, this.elements::add);
}
@Override
public @Nullable AnnotationVisitor visitAnnotation(String name, String descriptor) {
return MergedAnnotationReadingVisitor.this.visitAnnotation(descriptor, this.elements::add);
}
@Override
public void visitEnd() {
Class<?> componentType = getComponentType();
Object[] array = (Object[]) Array.newInstance(componentType, this.elements.size());
this.consumer.accept(this.elements.toArray(array));
}
private Class<?> getComponentType() {
if (this.elements.isEmpty()) {
return Object.class;
}
Object firstElement = this.elements.get(0);
if (firstElement instanceof Enum<?> enumeration) {
return enumeration.getDeclaringClass();
}
return firstElement.getClass();
}
}
}
|
ArrayVisitor
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-gcp/src/main/java/org/apache/hadoop/fs/gs/GoogleCloudStorageItemInfo.java
|
{
"start": 1370,
"end": 2469
}
|
class ____ {
// Info about the root of GCS namespace.
public static final GoogleCloudStorageItemInfo ROOT_INFO =
new GoogleCloudStorageItemInfo(StorageResourceId.ROOT,
/* creationTime= */ 0,
/* modificationTime= */ 0,
/* size= */ 0,
/* location= */ null,
/* storageClass= */ null,
/* contentType= */ null,
/* contentEncoding= */ null,
/* metadata= */ null,
/* contentGeneration= */ 0,
/* metaGeneration= */ 0,
/* verificationAttributes= */ null);
/**
* Factory method for creating a GoogleCloudStorageItemInfo for a bucket.
*
* @param resourceId Resource ID that identifies a bucket
* @param creationTime Time when a bucket was created (milliseconds since January 1, 1970
* UTC).
* @param modificationTime Time when a bucket was last modified (milliseconds since January 1,
* 1970 UTC).
* @param location Location of a bucket.
* @param storageClass Storage
|
GoogleCloudStorageItemInfo
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/reflect/InvokableTest.java
|
{
"start": 21278,
"end": 21855
}
|
class ____ {
@SuppressWarnings("unused") // called by reflection
InnerWithAnnotatedConstructorParameter(@Nullable String s) {}
}
public void testInnerClassWithAnnotatedConstructorParameter() {
Constructor<?> constructor =
InnerWithAnnotatedConstructorParameter.class.getDeclaredConstructors()[0];
Invokable<?, ?> invokable = Invokable.from(constructor);
assertEquals(1, invokable.getParameters().size());
assertEquals(TypeToken.of(String.class), invokable.getParameters().get(0).getType());
}
private
|
InnerWithAnnotatedConstructorParameter
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/VarTypeNameTest.java
|
{
"start": 1292,
"end": 1638
}
|
class ____ {}",
"// BUG: Diagnostic contains:",
" public <var> void foo(var foo) {}",
"}")
.setArgs(ImmutableList.of("-source", "8", "-target", "8"))
.doTest();
}
@Test
public void negative() {
testHelper
.addSourceLines(
"Test.java",
"""
|
var
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/MissingBindingValidationTest.java
|
{
"start": 5561,
"end": 5659
}
|
class ____ {",
" A() {}",
" }",
"",
" @Component()",
"
|
A
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java
|
{
"start": 48236,
"end": 49240
}
|
class ____ extends AnalyzerRule<UnaryPlan> {
@Override
protected LogicalPlan rule(UnaryPlan plan) {
if (plan.child() instanceof SubQueryAlias a) {
return plan.transformExpressionsDown(FieldAttribute.class, f -> {
if (f.qualifier() != null && f.qualifier().equals(a.alias())) {
// Find the underlying concrete relation (EsIndex) and its name as the new qualifier
List<LogicalPlan> children = a.collectFirstChildren(p -> p instanceof EsRelation);
if (children.isEmpty() == false) {
return f.withQualifier(((EsRelation) children.get(0)).index().name());
}
}
return f;
});
}
return plan;
}
@Override
protected boolean skipResolved() {
return false;
}
}
public static
|
ReplaceSubQueryAliases
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/conversion/lossy/OversizedKitchenDrawerDto.java
|
{
"start": 299,
"end": 1770
}
|
class ____ {
/* yes, its a big drawer */
private long numberOfForks;
private BigInteger numberOfKnifes;
private VerySpecialNumber numberOfSpoons;
private Double depth;
private BigDecimal length;
private double height;
private String drawerId;
public long getNumberOfForks() {
return numberOfForks;
}
public void setNumberOfForks(long numberOfForks) {
this.numberOfForks = numberOfForks;
}
public BigInteger getNumberOfKnifes() {
return numberOfKnifes;
}
public void setNumberOfKnifes(BigInteger numberOfKnifes) {
this.numberOfKnifes = numberOfKnifes;
}
public VerySpecialNumber getNumberOfSpoons() {
return numberOfSpoons;
}
public void setNumberOfSpoons(VerySpecialNumber numberOfSpoons) {
this.numberOfSpoons = numberOfSpoons;
}
public Double getDepth() {
return depth;
}
public void setDepth(Double depth) {
this.depth = depth;
}
public BigDecimal getLength() {
return length;
}
public void setLength(BigDecimal length) {
this.length = length;
}
public double getHeight() {
return height;
}
public void setHeight(double height) {
this.height = height;
}
public String getDrawerId() {
return drawerId;
}
public void setDrawerId(String drawerId) {
this.drawerId = drawerId;
}
}
|
OversizedKitchenDrawerDto
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java
|
{
"start": 3705,
"end": 4943
}
|
class ____ {
public final String id;
public final TestFieldSetting[] fieldSettings;
public final String[] fieldContent;
public String index = "test";
public String alias = "alias";
public TestDoc(String id, TestFieldSetting[] fieldSettings, String[] fieldContent) {
this.id = id;
assertEquals(fieldSettings.length, fieldContent.length);
this.fieldSettings = fieldSettings;
this.fieldContent = fieldContent;
}
public TestDoc index(String index) {
this.index = index;
return this;
}
public TestDoc alias(String alias) {
this.alias = alias;
return this;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("index:").append(index).append(" id:").append(id);
for (int i = 0; i < fieldSettings.length; i++) {
TestFieldSetting f = fieldSettings[i];
sb.append("\n").append("Field: ").append(f).append("\n content:").append(fieldContent[i]);
}
sb.append("\n");
return sb.toString();
}
}
protected static
|
TestDoc
|
java
|
spring-projects__spring-framework
|
spring-messaging/src/test/java/org/springframework/messaging/handler/annotation/support/DefaultMessageHandlerMethodFactoryTests.java
|
{
"start": 8062,
"end": 8567
}
|
class ____ {
private final Map<String, Boolean> invocations = new HashMap<>();
public void simpleString(String value) {
invocations.put("simpleString", true);
}
public void payloadValidation(@Payload @Validated String value) {
invocations.put("payloadValidation", true);
}
public void customArgumentResolver(Locale locale) {
invocations.put("customArgumentResolver", true);
assertThat(locale).as("Wrong value for locale").isEqualTo(Locale.getDefault());
}
}
static
|
SampleBean
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/LimitExecSerializationTests.java
|
{
"start": 579,
"end": 2047
}
|
class ____ extends AbstractPhysicalPlanSerializationTests<LimitExec> {
public static LimitExec randomLimitExec(int depth) {
Source source = randomSource();
PhysicalPlan child = randomChild(depth);
Expression limit = randomLimit();
return new LimitExec(source, child, limit, randomEstimatedRowSize());
}
private static Expression randomLimit() {
return new Literal(randomSource(), between(0, Integer.MAX_VALUE), DataType.INTEGER);
}
@Override
protected LimitExec createTestInstance() {
return randomLimitExec(0);
}
@Override
protected LimitExec mutateInstance(LimitExec instance) throws IOException {
PhysicalPlan child = instance.child();
Expression limit = instance.limit();
Integer estimatedRowSize = instance.estimatedRowSize();
switch (between(0, 2)) {
case 0 -> child = randomValueOtherThan(child, () -> randomChild(0));
case 1 -> limit = randomValueOtherThan(limit, LimitExecSerializationTests::randomLimit);
case 2 -> estimatedRowSize = randomValueOtherThan(estimatedRowSize, LimitExecSerializationTests::randomEstimatedRowSize);
default -> throw new AssertionError("Unexpected case");
}
return new LimitExec(instance.source(), child, limit, estimatedRowSize);
}
@Override
protected boolean alwaysEmptySource() {
return true;
}
}
|
LimitExecSerializationTests
|
java
|
apache__rocketmq
|
tools/src/main/java/org/apache/rocketmq/tools/command/CommandUtil.java
|
{
"start": 1587,
"end": 8392
}
|
class ____ {
private static final String ERROR_MESSAGE = "Make sure the specified clusterName exists or the name server connected to is correct.";
public static final String NO_MASTER_PLACEHOLDER = "NO_MASTER";
public static Map<String/*master addr*/, List<String>/*slave addr*/> fetchMasterAndSlaveDistinguish(
final MQAdminExt adminExt, final String clusterName)
throws InterruptedException, RemotingConnectException,
RemotingTimeoutException, RemotingSendRequestException,
MQBrokerException {
Map<String, List<String>> masterAndSlaveMap = new HashMap<>(4);
ClusterInfo clusterInfoSerializeWrapper = adminExt.examineBrokerClusterInfo();
Set<String> brokerNameSet = clusterInfoSerializeWrapper.getClusterAddrTable().get(clusterName);
if (brokerNameSet == null) {
System.out.printf("[error] %s", ERROR_MESSAGE);
return masterAndSlaveMap;
}
for (String brokerName : brokerNameSet) {
BrokerData brokerData = clusterInfoSerializeWrapper.getBrokerAddrTable().get(brokerName);
if (brokerData == null || brokerData.getBrokerAddrs() == null) {
continue;
}
String masterAddr = brokerData.getBrokerAddrs().get(MixAll.MASTER_ID);
if (masterAddr == null) {
masterAndSlaveMap.putIfAbsent(NO_MASTER_PLACEHOLDER, new ArrayList<>());
} else {
masterAndSlaveMap.put(masterAddr, new ArrayList<>());
}
for (Entry<Long, String> brokerAddrEntry : brokerData.getBrokerAddrs().entrySet()) {
if (brokerAddrEntry.getValue() == null || brokerAddrEntry.getKey() == MixAll.MASTER_ID) {
continue;
}
if (masterAddr == null) {
masterAndSlaveMap.get(NO_MASTER_PLACEHOLDER).add(brokerAddrEntry.getValue());
} else {
masterAndSlaveMap.get(masterAddr).add(brokerAddrEntry.getValue());
}
}
}
return masterAndSlaveMap;
}
public static Set<String> fetchMasterAddrByClusterName(final MQAdminExt adminExt, final String clusterName)
throws InterruptedException, RemotingConnectException, RemotingTimeoutException,
RemotingSendRequestException, MQBrokerException {
Set<String> masterSet = new HashSet<>();
ClusterInfo clusterInfoSerializeWrapper = adminExt.examineBrokerClusterInfo();
Set<String> brokerNameSet = clusterInfoSerializeWrapper.getClusterAddrTable().get(clusterName);
if (brokerNameSet != null) {
for (String brokerName : brokerNameSet) {
BrokerData brokerData = clusterInfoSerializeWrapper.getBrokerAddrTable().get(brokerName);
if (brokerData != null) {
String addr = brokerData.getBrokerAddrs().get(MixAll.MASTER_ID);
if (addr != null) {
masterSet.add(addr);
}
}
}
} else {
System.out.printf("[error] %s", ERROR_MESSAGE);
}
return masterSet;
}
public static Set<String> fetchMasterAndSlaveAddrByClusterName(final MQAdminExt adminExt, final String clusterName)
throws InterruptedException, RemotingConnectException, RemotingTimeoutException,
RemotingSendRequestException, MQBrokerException {
Set<String> brokerAddressSet = new HashSet<>();
ClusterInfo clusterInfoSerializeWrapper = adminExt.examineBrokerClusterInfo();
Set<String> brokerNameSet = clusterInfoSerializeWrapper.getClusterAddrTable().get(clusterName);
if (brokerNameSet != null) {
for (String brokerName : brokerNameSet) {
BrokerData brokerData = clusterInfoSerializeWrapper.getBrokerAddrTable().get(brokerName);
if (brokerData != null) {
final Collection<String> addrs = brokerData.getBrokerAddrs().values();
brokerAddressSet.addAll(addrs);
}
}
} else {
System.out.printf("[error] %s", ERROR_MESSAGE);
}
return brokerAddressSet;
}
public static String fetchMasterAddrByBrokerName(final MQAdminExt adminExt,
final String brokerName) throws Exception {
ClusterInfo clusterInfoSerializeWrapper = adminExt.examineBrokerClusterInfo();
BrokerData brokerData = clusterInfoSerializeWrapper.getBrokerAddrTable().get(brokerName);
if (null != brokerData) {
String addr = brokerData.getBrokerAddrs().get(MixAll.MASTER_ID);
if (addr != null) {
return addr;
}
}
throw new Exception(String.format("No broker address for broker name %s.", brokerName));
}
public static Set<String> fetchMasterAndSlaveAddrByBrokerName(final MQAdminExt adminExt, final String brokerName)
throws InterruptedException, RemotingConnectException, RemotingTimeoutException,
RemotingSendRequestException, MQBrokerException {
Set<String> brokerAddressSet = new HashSet<>();
ClusterInfo clusterInfoSerializeWrapper = adminExt.examineBrokerClusterInfo();
final BrokerData brokerData = clusterInfoSerializeWrapper.getBrokerAddrTable().get(brokerName);
if (brokerData != null) {
brokerAddressSet.addAll(brokerData.getBrokerAddrs().values());
}
return brokerAddressSet;
}
public static Set<String> fetchBrokerNameByClusterName(final MQAdminExt adminExt, final String clusterName)
throws Exception {
ClusterInfo clusterInfoSerializeWrapper = adminExt.examineBrokerClusterInfo();
Set<String> brokerNameSet = clusterInfoSerializeWrapper.getClusterAddrTable().get(clusterName);
if (brokerNameSet == null || brokerNameSet.isEmpty()) {
throw new Exception(ERROR_MESSAGE);
}
return brokerNameSet;
}
public static String fetchBrokerNameByAddr(final MQAdminExt adminExt, final String addr) throws Exception {
ClusterInfo clusterInfoSerializeWrapper = adminExt.examineBrokerClusterInfo();
Map<String/* brokerName */, BrokerData> brokerAddrTable = clusterInfoSerializeWrapper.getBrokerAddrTable();
Iterator<Map.Entry<String, BrokerData>> it = brokerAddrTable.entrySet().iterator();
while (it.hasNext()) {
Map.Entry<String, BrokerData> entry = it.next();
HashMap<Long, String> brokerAddrs = entry.getValue().getBrokerAddrs();
if (brokerAddrs.containsValue(addr)) {
return entry.getKey();
}
}
throw new Exception(ERROR_MESSAGE);
}
}
|
CommandUtil
|
java
|
google__guava
|
android/guava/src/com/google/common/primitives/SignedBytes.java
|
{
"start": 6227,
"end": 7408
}
|
enum ____ implements Comparator<byte[]> {
INSTANCE;
@Override
public int compare(byte[] left, byte[] right) {
int minLength = Math.min(left.length, right.length);
for (int i = 0; i < minLength; i++) {
int result = Byte.compare(left[i], right[i]);
if (result != 0) {
return result;
}
}
return left.length - right.length;
}
@Override
public String toString() {
return "SignedBytes.lexicographicalComparator()";
}
}
/**
* Sorts the elements of {@code array} in descending order.
*
* @since 23.1
*/
public static void sortDescending(byte[] array) {
checkNotNull(array);
sortDescending(array, 0, array.length);
}
/**
* Sorts the elements of {@code array} between {@code fromIndex} inclusive and {@code toIndex}
* exclusive in descending order.
*
* @since 23.1
*/
public static void sortDescending(byte[] array, int fromIndex, int toIndex) {
checkNotNull(array);
checkPositionIndexes(fromIndex, toIndex, array.length);
Arrays.sort(array, fromIndex, toIndex);
Bytes.reverse(array, fromIndex, toIndex);
}
}
|
LexicographicalComparator
|
java
|
apache__camel
|
components/camel-influxdb/src/main/java/org/apache/camel/component/influxdb/InfluxDbProducer.java
|
{
"start": 1380,
"end": 5432
}
|
class ____ extends DefaultProducer {
private static final Logger LOG = LoggerFactory.getLogger(InfluxDbProducer.class);
InfluxDbEndpoint endpoint;
InfluxDB connection;
public InfluxDbProducer(InfluxDbEndpoint endpoint) {
super(endpoint);
this.connection = endpoint.getInfluxDB();
this.endpoint = endpoint;
}
/**
* Processes the message exchange
*
* @param exchange the message exchange
* @throws Exception if an internal processing error has occurred.
*/
@Override
public void process(Exchange exchange) throws Exception {
String dataBaseName = calculateDatabaseName(exchange);
String retentionPolicy = calculateRetentionPolicy(exchange);
switch (endpoint.getOperation()) {
case InfluxDbOperations.INSERT:
doInsert(exchange, dataBaseName, retentionPolicy);
break;
case InfluxDbOperations.QUERY:
doQuery(exchange, dataBaseName);
break;
case InfluxDbOperations.PING:
doPing(exchange);
break;
default:
throw new IllegalArgumentException("The operation " + endpoint.getOperation() + " is not supported");
}
}
private void doInsert(Exchange exchange, String dataBaseName, String retentionPolicy) throws InvalidPayloadException {
if (!endpoint.isBatch()) {
Point p = exchange.getIn().getMandatoryBody(Point.class);
try {
if (LOG.isDebugEnabled()) {
LOG.debug("Writing point {}", p.lineProtocol());
}
connection.write(dataBaseName, retentionPolicy, p);
} catch (Exception ex) {
exchange.setException(new CamelInfluxDbException(ex));
}
} else {
BatchPoints batchPoints = exchange.getIn().getMandatoryBody(BatchPoints.class);
try {
if (LOG.isDebugEnabled()) {
LOG.debug("Writing BatchPoints {}", batchPoints.lineProtocol());
}
connection.write(batchPoints);
} catch (Exception ex) {
exchange.setException(new CamelInfluxDbException(ex));
}
}
}
private void doQuery(Exchange exchange, String dataBaseName) {
String query = calculateQuery(exchange);
Query influxdbQuery = new Query(query, dataBaseName);
QueryResult resultSet = connection.query(influxdbQuery);
MessageHelper.copyHeaders(exchange.getIn(), exchange.getOut(), true);
exchange.getMessage().setBody(resultSet);
}
private void doPing(Exchange exchange) {
Pong result = connection.ping();
MessageHelper.copyHeaders(exchange.getIn(), exchange.getOut(), true);
exchange.getMessage().setBody(result);
}
private String calculateRetentionPolicy(Exchange exchange) {
String retentionPolicy = exchange.getIn().getHeader(InfluxDbConstants.RETENTION_POLICY_HEADER, String.class);
if (ObjectHelper.isNotEmpty(retentionPolicy)) {
return retentionPolicy;
}
return endpoint.getRetentionPolicy();
}
private String calculateDatabaseName(Exchange exchange) {
String dbName = exchange.getIn().getHeader(InfluxDbConstants.DBNAME_HEADER, String.class);
if (ObjectHelper.isNotEmpty(dbName)) {
return dbName;
}
return endpoint.getDatabaseName();
}
private String calculateQuery(Exchange exchange) {
String query = exchange.getIn().getHeader(InfluxDbConstants.INFLUXDB_QUERY, String.class);
if (ObjectHelper.isNotEmpty(query)) {
return query;
} else {
query = endpoint.getQuery();
}
if (ObjectHelper.isEmpty(query)) {
throw new IllegalArgumentException("The query option must be set if you want to run a query operation");
}
return query;
}
}
|
InfluxDbProducer
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/event/internal/EntityCopyAllowedObserver.java
|
{
"start": 466,
"end": 1230
}
|
class ____ implements EntityCopyObserver {
public static final String SHORT_NAME = "allow";
private static final EntityCopyObserver INSTANCE = new EntityCopyAllowedObserver();
//This implementation of EntityCopyObserver is stateless, so no need to create multiple copies:
public static final EntityCopyObserverFactory FACTORY_OF_SELF = () -> INSTANCE;
private EntityCopyAllowedObserver() {
//Not to be constructed; use INSTANCE.
}
@Override
public void entityCopyDetected(
Object managedEntity,
Object mergeEntity1,
Object mergeEntity2,
EventSource session) {
// do nothing.
}
public void clear() {
// do nothing.
}
@Override
public void topLevelMergeComplete(EventSource session) {
// do nothing.
}
}
|
EntityCopyAllowedObserver
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsAdmin.java
|
{
"start": 2988,
"end": 3137
}
|
class ____ provides the functionality for the CLI `hdfs dfsadmin ...'
* commands.
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public
|
that
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/builder/impl/SQLUpdateBuilderImpl.java
|
{
"start": 1545,
"end": 5999
}
|
class ____ extends SQLBuilderImpl implements SQLUpdateBuilder {
private SQLUpdateStatement stmt;
private DbType dbType;
public SQLUpdateBuilderImpl(DbType dbType) {
this.dbType = dbType;
}
public SQLUpdateBuilderImpl(String sql, DbType dbType) {
List<SQLStatement> stmtList = SQLUtils.parseStatements(sql, dbType);
if (stmtList.isEmpty()) {
throw new IllegalArgumentException("not support empty-statement :" + sql);
}
if (stmtList.size() > 1) {
throw new IllegalArgumentException("not support multi-statement :" + sql);
}
SQLUpdateStatement stmt = (SQLUpdateStatement) stmtList.get(0);
this.stmt = stmt;
this.dbType = dbType;
}
public SQLUpdateBuilderImpl(SQLUpdateStatement stmt, DbType dbType) {
this.stmt = stmt;
this.dbType = dbType;
}
@Override
public SQLUpdateBuilderImpl limit(int rowCount) {
throw new UnsupportedOperationException();
}
@Override
public SQLUpdateBuilderImpl limit(int rowCount, int offset) {
throw new UnsupportedOperationException();
}
@Override
public SQLUpdateBuilderImpl from(String table) {
return from(table, null);
}
@Override
public SQLUpdateBuilderImpl from(String table, String alias) {
SQLUpdateStatement update = getSQLUpdateStatement();
SQLExprTableSource from = new SQLExprTableSource(new SQLIdentifierExpr(table), alias);
update.setTableSource(from);
return this;
}
@Override
public SQLUpdateBuilderImpl where(String expr) {
SQLUpdateStatement update = getSQLUpdateStatement();
SQLExpr exprObj = SQLUtils.toSQLExpr(expr, dbType);
update.setWhere(exprObj);
return this;
}
@Override
public SQLUpdateBuilderImpl whereAnd(String expr) {
SQLUpdateStatement update = getSQLUpdateStatement();
SQLExpr exprObj = SQLUtils.toSQLExpr(expr, dbType);
SQLExpr newCondition = SQLUtils.buildCondition(SQLBinaryOperator.BooleanAnd, exprObj, false, update.getWhere());
update.setWhere(newCondition);
return this;
}
@Override
public SQLUpdateBuilderImpl whereOr(String expr) {
SQLUpdateStatement update = getSQLUpdateStatement();
SQLExpr exprObj = SQLUtils.toSQLExpr(expr, dbType);
SQLExpr newCondition = SQLUtils.buildCondition(SQLBinaryOperator.BooleanOr, exprObj, false, update.getWhere());
update.setWhere(newCondition);
return this;
}
public SQLUpdateBuilderImpl set(String... items) {
SQLUpdateStatement update = getSQLUpdateStatement();
for (String item : items) {
SQLUpdateSetItem updateSetItem = SQLUtils.toUpdateSetItem(item, dbType);
update.addItem(updateSetItem);
}
return this;
}
public SQLUpdateBuilderImpl setValue(Map<String, Object> values) {
for (Map.Entry<String, Object> entry : values.entrySet()) {
setValue(entry.getKey(), entry.getValue());
}
return this;
}
public SQLUpdateBuilderImpl setValue(String column, Object value) {
SQLUpdateStatement update = getSQLUpdateStatement();
SQLExpr columnExpr = SQLUtils.toSQLExpr(column, dbType);
SQLExpr valueExpr = toSQLExpr(value, dbType);
SQLUpdateSetItem item = new SQLUpdateSetItem();
item.setColumn(columnExpr);
item.setValue(valueExpr);
update.addItem(item);
return this;
}
public SQLUpdateStatement getSQLUpdateStatement() {
if (stmt == null) {
stmt = createSQLUpdateStatement();
}
return stmt;
}
public SQLUpdateStatement createSQLUpdateStatement() {
switch (dbType) {
case mysql:
case mariadb:
case tidb:
case polardbx:
return new MySqlUpdateStatement();
case oracle:
return new OracleUpdateStatement();
case postgresql:
case greenplum:
case edb:
return new PGUpdateStatement();
case sqlserver:
return new SQLServerUpdateStatement();
default:
return new SQLUpdateStatement();
}
}
public String toString() {
return SQLUtils.toSQLString(stmt, dbType);
}
}
|
SQLUpdateBuilderImpl
|
java
|
grpc__grpc-java
|
grpclb/src/main/java/io/grpc/grpclb/GrpclbLoadBalancerProvider.java
|
{
"start": 1240,
"end": 1434
}
|
class ____ not be directly referenced in
* code. The policy should be accessed through {@link io.grpc.LoadBalancerRegistry#getProvider}
* with the name "grpclb".
*/
@Internal
public final
|
should
|
java
|
google__auto
|
value/src/test/java/com/google/auto/value/processor/AutoValueCompilationTest.java
|
{
"start": 120266,
"end": 121144
}
|
interface ____ {",
" Builder blam(String x);",
" String build();",
" }",
"}");
Compilation compilation =
javac()
.withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor())
.compile(javaFileObject);
assertThat(compilation)
.hadErrorContaining(
"Method without arguments should be a build method returning foo.bar.Baz")
.inFile(javaFileObject)
.onLineContaining("String build()");
}
@Test
public void autoValueBuilderTypeParametersDontMatch1() {
JavaFileObject javaFileObject =
JavaFileObjects.forSourceLines(
"foo.bar.Baz",
"package foo.bar;",
"",
"import com.google.auto.value.AutoValue;",
"",
"@AutoValue",
"public abstract
|
Builder
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/ClassEndpointBuilderFactory.java
|
{
"start": 1445,
"end": 1566
}
|
interface ____ {
/**
* Builder for endpoint for the Class component.
*/
public
|
ClassEndpointBuilderFactory
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/UndefinedEqualsTest.java
|
{
"start": 15902,
"end": 16825
}
|
class ____ {
void listenableFuture(ListenableFuture a, ListenableFuture b) {
// BUG: Diagnostic contains: Future does not have well-defined equality semantics
a.equals(b);
}
void settableFuture(SettableFuture a, SettableFuture b) {
// BUG: Diagnostic contains: Future does not have well-defined equality semantics
a.equals(b);
}
void completableFuture(CompletableFuture a, CompletableFuture b) {
// BUG: Diagnostic contains: Future does not have well-defined equality semantics
a.equals(b);
}
void future(Future a, Future b) {
// BUG: Diagnostic contains: Future does not have well-defined equality semantics
a.equals(b);
}
}
""")
.doTest();
}
}
|
Test
|
java
|
apache__logging-log4j2
|
log4j-layout-template-json/src/main/java/org/apache/logging/log4j/layout/template/json/util/DummyRecyclerFactory.java
|
{
"start": 950,
"end": 1378
}
|
class ____ implements RecyclerFactory {
private static final DummyRecyclerFactory INSTANCE = new DummyRecyclerFactory();
private DummyRecyclerFactory() {}
public static DummyRecyclerFactory getInstance() {
return INSTANCE;
}
@Override
public <V> Recycler<V> create(final Supplier<V> supplier, final Consumer<V> cleaner) {
return new DummyRecycler<>(supplier);
}
}
|
DummyRecyclerFactory
|
java
|
quarkusio__quarkus
|
extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/StartupBuildSteps.java
|
{
"start": 2057,
"end": 6923
}
|
class ____ {
static final DotName STARTUP_NAME = DotName.createSimple(Startup.class.getName());
static final MethodDesc ARC_CONTAINER = MethodDesc.of(Arc.class, "container", ArcContainer.class);
static final MethodDesc ARC_CONTAINER_BEAN = MethodDesc.of(ArcContainer.class, "bean",
InjectableBean.class, String.class);
static final MethodDesc ARC_CONTAINER_INSTANCE = MethodDesc.of(ArcContainer.class, "instance",
InstanceHandle.class, InjectableBean.class);
static final MethodDesc INSTANCE_HANDLE_GET = MethodDesc.of(InstanceHandle.class, "get", Object.class);
static final MethodDesc CLIENT_PROXY_CONTEXTUAL_INSTANCE = MethodDesc.of(ClientProxy.class, "arc_contextualInstance",
Object.class);
static final MethodDesc CONTEXTUAL_CREATE = MethodDesc.of(Contextual.class, "create",
Object.class, CreationalContext.class);
static final MethodDesc CONTEXTUAL_DESTROY = MethodDesc.of(Contextual.class, "destroy",
void.class, Object.class, CreationalContext.class);
static final ConstructorDesc CREATIONAL_CONTEXT_IMPL_CTOR = ConstructorDesc.of(CreationalContextImpl.class,
Contextual.class);
private static final Logger LOG = Logger.getLogger(StartupBuildSteps.class);
@BuildStep
AutoAddScopeBuildItem addScope(CustomScopeAnnotationsBuildItem customScopes) {
// Class with no built-in scope annotation but with @Startup annotation
return AutoAddScopeBuildItem.builder()
.defaultScope(BuiltinScope.APPLICATION)
.match(new MatchPredicate() {
@Override
public boolean test(ClassInfo clazz, Collection<AnnotationInstance> annotations, IndexView index) {
if (Annotations.contains(annotations, STARTUP_NAME)) {
// Class annotated with @Startup
return true;
}
for (MethodInfo method : clazz.methods()) {
if (method.hasAnnotation(STARTUP_NAME)
&& !method.hasAnnotation(DotNames.PRODUCES)) {
// @Startup methods but not producers
return true;
}
}
return false;
}
})
.reason("Found classes containing @Startup annotation.")
.build();
}
@BuildStep
UnremovableBeanBuildItem unremovableBeans() {
return new UnremovableBeanBuildItem(new Predicate<BeanInfo>() {
@Override
public boolean test(BeanInfo bean) {
if (bean.isClassBean()) {
return bean.getTarget().get().asClass().hasAnnotation(STARTUP_NAME);
} else if (bean.isProducerMethod()) {
return !getAnnotations(Kind.METHOD, STARTUP_NAME, bean.getTarget().get().asMethod().annotations())
.isEmpty();
} else if (bean.isProducerField()) {
return bean.getTarget().get().asField().hasAnnotation(STARTUP_NAME);
}
// No target - synthetic bean
return false;
}
});
}
@BuildStep
void registerStartupObservers(ObserverRegistrationPhaseBuildItem observerRegistration,
BuildProducer<ObserverConfiguratorBuildItem> configurators) {
AnnotationStore annotationStore = observerRegistration.getContext().get(BuildExtension.Key.ANNOTATION_STORE);
for (BeanInfo bean : observerRegistration.getContext().beans()) {
if (bean.isSynthetic()) {
OptionalInt startupPriority = bean.getStartupPriority();
if (startupPriority.isPresent()) {
registerStartupObserver(observerRegistration, bean, bean.getIdentifier(),
startupPriority.getAsInt(), null);
}
} else {
// First check if the target is annotated with @Startup
// Class for class-based bean, method for producer method, etc.
AnnotationTarget target = bean.getTarget().get();
AnnotationInstance startupAnnotation = annotationStore.getAnnotation(target, STARTUP_NAME);
if (startupAnnotation != null) {
AnnotationValue priority = startupAnnotation.value();
registerStartupObserver(observerRegistration, bean, bean.getIdentifier(),
priority != null ? priority.asInt() : ObserverMethod.DEFAULT_PRIORITY, null);
}
if (target.kind() == Kind.CLASS) {
// If the target is a
|
StartupBuildSteps
|
java
|
apache__spark
|
common/unsafe/src/main/java/org/apache/spark/sql/catalyst/util/CollationFactory.java
|
{
"start": 28849,
"end": 29047
}
|
enum ____ {
CS, CI
}
/**
* Bit 16 in collation ID having value 0 for accent-sensitive and 1 for accent-insensitive
* collation.
*/
private
|
CaseSensitivity
|
java
|
apache__flink
|
flink-core-api/src/main/java/org/apache/flink/api/java/tuple/builder/Tuple25Builder.java
|
{
"start": 1268,
"end": 2222
}
|
class ____ {@link Tuple25}.
*
* @param <T0> The type of field 0
* @param <T1> The type of field 1
* @param <T2> The type of field 2
* @param <T3> The type of field 3
* @param <T4> The type of field 4
* @param <T5> The type of field 5
* @param <T6> The type of field 6
* @param <T7> The type of field 7
* @param <T8> The type of field 8
* @param <T9> The type of field 9
* @param <T10> The type of field 10
* @param <T11> The type of field 11
* @param <T12> The type of field 12
* @param <T13> The type of field 13
* @param <T14> The type of field 14
* @param <T15> The type of field 15
* @param <T16> The type of field 16
* @param <T17> The type of field 17
* @param <T18> The type of field 18
* @param <T19> The type of field 19
* @param <T20> The type of field 20
* @param <T21> The type of field 21
* @param <T22> The type of field 22
* @param <T23> The type of field 23
* @param <T24> The type of field 24
*/
@Public
public
|
for
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.