language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | mapstruct__mapstruct | processor/src/main/java/org/mapstruct/ap/internal/processor/ModelElementProcessor.java | {
"start": 1194,
"end": 1510
} | interface ____<P, R> {
/**
* Context object passed to
* {@link ModelElementProcessor#process(ProcessorContext, TypeElement, Object)}
* providing access to common infrastructure objects such as {@link TypeUtils}
* etc.
*
* @author Gunnar Morling
*/
public | ModelElementProcessor |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/FluxSampleFirst.java | {
"start": 6929,
"end": 7766
} | class ____<U> extends Operators.DeferredSubscription
implements InnerConsumer<U> {
final SampleFirstMain<?, U> main;
SampleFirstOther(SampleFirstMain<?, U> main) {
this.main = main;
}
@Override
public Context currentContext() {
return main.currentContext();
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.ACTUAL) return main;
if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
return super.scanUnsafe(key);
}
@Override
public void onSubscribe(Subscription s) {
if (set(s)) {
s.request(Long.MAX_VALUE);
}
}
@Override
public void onNext(U t) {
cancel();
main.otherNext();
}
@Override
public void onError(Throwable t) {
main.otherError(t);
}
@Override
public void onComplete() {
main.otherNext();
}
}
}
| SampleFirstOther |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/records/JsonIdentityOnRecord5238Test.java | {
"start": 755,
"end": 1237
} | class ____ {
public List<ThingPojo> allThings;
public ThingPojo selected;
@JsonCreator
public ExamplePojo(
@JsonProperty("allThings") List<ThingPojo> allThings,
@JsonProperty("selected") ThingPojo selected) {
this.allThings = allThings;
this.selected = selected;
}
}
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "id")
static | ExamplePojo |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java | {
"start": 11753,
"end": 17028
} | class ____ implements SpanQueryBuilder {
public static final String NAME = "span_gap";
/** Name of field to match against. */
private final String fieldName;
/** Width of the gap introduced. */
private final int width;
/**
* Constructs a new SpanGapQueryBuilder term query.
*
* @param fieldName The name of the field
* @param width The width of the gap introduced
*/
public SpanGapQueryBuilder(String fieldName, int width) {
if (Strings.isEmpty(fieldName)) {
throw new IllegalArgumentException("[span_gap] field name is null or empty");
}
// lucene has not coded any restriction on value of width.
// to-do : find if theoretically it makes sense to apply restrictions.
this.fieldName = fieldName;
this.width = width;
}
/**
* Read from a stream.
*/
public SpanGapQueryBuilder(StreamInput in) throws IOException {
fieldName = in.readString();
width = in.readInt();
}
/**
* @return fieldName The name of the field
*/
public String fieldName() {
return fieldName;
}
/**
* @return width The width of the gap introduced
*/
public int width() {
return width;
}
@Override
public Query toQuery(SearchExecutionContext context) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public String queryName() {
throw new UnsupportedOperationException();
}
@Override
public QueryBuilder queryName(String queryName) {
throw new UnsupportedOperationException();
}
@Override
public float boost() {
throw new UnsupportedOperationException();
}
@Override
public QueryBuilder boost(float boost) {
throw new UnsupportedOperationException();
}
@Override
public String getName() {
return NAME;
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public TransportVersion getMinimalSupportedVersion() {
return TransportVersion.zero();
}
@Override
public final void writeTo(StreamOutput out) throws IOException {
out.writeString(fieldName);
out.writeInt(width);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.startObject(getName());
builder.field(fieldName, width);
builder.endObject();
builder.endObject();
return builder;
}
public static SpanGapQueryBuilder fromXContent(XContentParser parser) throws IOException {
String fieldName = null;
int width = 0;
String currentFieldName = null;
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
throwParsingExceptionOnMultipleFields(NAME, parser.getTokenLocation(), fieldName, currentFieldName);
fieldName = currentFieldName;
} else if (token.isValue()) {
width = parser.intValue();
}
}
SpanGapQueryBuilder result = new SpanGapQueryBuilder(fieldName, width);
return result;
}
@Override
public final boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
SpanGapQueryBuilder other = (SpanGapQueryBuilder) obj;
return Objects.equals(fieldName, other.fieldName) && Objects.equals(width, other.width);
}
@Override
public final int hashCode() {
return Objects.hash(getClass(), fieldName, width);
}
@Override
public final String toString() {
return Strings.toString(this, true, true);
}
// copied from AbstractQueryBuilder
protected static void throwParsingExceptionOnMultipleFields(
String queryName,
XContentLocation contentLocation,
String processedFieldName,
String currentFieldName
) {
if (processedFieldName != null) {
throw new ParsingException(
contentLocation,
"["
+ queryName
+ "] query doesn't support multiple fields, found ["
+ processedFieldName
+ "] and ["
+ currentFieldName
+ "]"
);
}
}
}
}
| SpanGapQueryBuilder |
java | quarkusio__quarkus | extensions/spring-security/deployment/src/main/java/io/quarkus/spring/security/deployment/SpringPreAuthorizeAnnotatedMethodBuildItem.java | {
"start": 329,
"end": 775
} | class ____ extends SimpleBuildItem {
private final Map<MethodInfo, AnnotationInstance> methodToInstanceMap;
public SpringPreAuthorizeAnnotatedMethodBuildItem(Map<MethodInfo, AnnotationInstance> methodToInstanceMap) {
this.methodToInstanceMap = methodToInstanceMap;
}
public Map<MethodInfo, AnnotationInstance> getMethodToInstanceMap() {
return methodToInstanceMap;
}
}
| SpringPreAuthorizeAnnotatedMethodBuildItem |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java | {
"start": 72747,
"end": 104455
} | class ____ {
private final Map<String, Map<String, SubscribableListener<Void>>> shardSnapshotUpdatesBlockMap = new HashMap<>();
private static final SubscribableListener<Void> ALWAYS_PROCEED = SubscribableListener.newSucceeded(null);
private SubscribableListener<Void> listenerFor(String snapshot, String index) {
if ("last-snapshot".equals(snapshot) || "first-snapshot".equals(snapshot)) {
return ALWAYS_PROCEED;
}
return shardSnapshotUpdatesBlockMap
//
.computeIfAbsent(snapshot, v -> new HashMap<>())
.computeIfAbsent(index, v -> new SubscribableListener<>());
}
void releaseBlock(String snapshot, String index) {
listenerFor(snapshot, index).onResponse(null);
}
/**
* @return a {@link TransportInterceptor} which enforces the sequencing of shard snapshot updates
*/
TransportInterceptor newTransportInterceptor() {
return new TransportInterceptor() {
@Override
public <T extends TransportRequest> TransportRequestHandler<T> interceptHandler(
String action,
Executor executor,
boolean forceExecution,
TransportRequestHandler<T> actualHandler
) {
if (action.equals(TransportUpdateSnapshotStatusAction.NAME)) {
return (request, channel, task) -> ActionListener.run(
ActionTestUtils.<TransportResponse>assertNoFailureListener(new ChannelActionListener<>(channel)::onResponse),
l -> {
final var updateRequest = asInstanceOf(UpdateIndexShardSnapshotStatusRequest.class, request);
listenerFor(updateRequest.snapshot().getSnapshotId().getName(), updateRequest.shardId().getIndexName()).<
TransportResponse>andThen(
ll -> actualHandler.messageReceived(request, new TestTransportChannel(ll), task)
).addListener(l);
}
);
} else {
return actualHandler;
}
}
};
}
}
public void testDeleteIndexBetweenSuccessAndFinalization() {
final var sequencer = new ShardSnapshotUpdatesSequencer();
setupTestCluster(
1,
1,
node -> node.isMasterNode() ? sequencer.newTransportInterceptor() : TransportService.NOOP_TRANSPORT_INTERCEPTOR
);
final var masterNode = testClusterNodes.randomMasterNodeSafe();
final var client = masterNode.client;
final var masterClusterService = masterNode.clusterService;
final var snapshotCount = between(3, 5);
final var indices = IntStream.range(0, snapshotCount + 1).mapToObj(i -> "index-" + i).toList();
final var repoName = "repo";
final var indexToDelete = "index-" + snapshotCount;
var testListener = SubscribableListener
// Create the repo and indices
.<Void>newForked(stepListener -> {
try (var listeners = new RefCountingListener(stepListener)) {
client().admin()
.cluster()
.preparePutRepository(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, repoName)
.setType(FsRepository.TYPE)
.setSettings(Settings.builder().put("location", randomAlphaOfLength(10)))
.execute(listeners.acquire(createRepoResponse -> {}));
for (final var index : indices) {
client.admin()
.indices()
.create(
new CreateIndexRequest(index).waitForActiveShards(ActiveShardCount.ALL).settings(defaultIndexSettings(1)),
listeners.acquire(createIndexResponse -> {})
);
}
}
})
.andThen(l -> {
// Create the first snapshot as source of the clone
client.admin()
.cluster()
.prepareCreateSnapshot(TEST_REQUEST_TIMEOUT, repoName, "first-snapshot")
.setIndices("index-0", indexToDelete)
.setPartial(false)
.setWaitForCompletion(true)
.execute(l.map(v -> null));
});
// Start some snapshots such that snapshot-{i} contains index-{i} and index-{snapshotCount} so that we can control the order in
// which they finalize by controlling the order in which the shard snapshot updates are processed
final var cloneFuture = new PlainActionFuture<AcknowledgedResponse>();
for (int i = 0; i < snapshotCount; i++) {
final var snapshotName = "snapshot-" + i;
final var indexName = "index-" + i;
testListener = testListener.andThen(
stepListener -> client.admin()
.cluster()
.prepareCreateSnapshot(TEST_REQUEST_TIMEOUT, repoName, snapshotName)
.setIndices(indexName, indexToDelete)
.setPartial(true)
.execute(stepListener.map(createSnapshotResponse -> null))
);
if (i == 0) {
// Insert a clone between snapshot-0 and snapshot-1 and it finalizes after snapshot-1 because it will be blocked on index-0
testListener = testListener.andThen(stepListener -> {
client.admin()
.cluster()
.prepareCloneSnapshot(TEST_REQUEST_TIMEOUT, repoName, "first-snapshot", "clone")
.setIndices("index-0", indexToDelete)
.execute(cloneFuture);
ClusterServiceUtils.addTemporaryStateListener(
masterClusterService,
clusterState -> SnapshotsInProgress.get(clusterState)
.asStream()
.anyMatch(e -> e.snapshot().getSnapshotId().getName().equals("clone") && e.isClone())
).addListener(stepListener.map(v -> null));
});
}
}
testListener = testListener
// wait for the target index to complete in snapshot-1
.andThen(l -> {
sequencer.releaseBlock("snapshot-0", indexToDelete);
sequencer.releaseBlock("snapshot-1", indexToDelete);
ClusterServiceUtils.addTemporaryStateListener(
masterClusterService,
clusterState -> SnapshotsInProgress.get(clusterState)
.asStream()
.filter(e -> e.isClone() == false)
.mapToLong(
e -> e.shards()
.entrySet()
.stream()
.filter(
e2 -> e2.getKey().getIndexName().equals(indexToDelete)
&& e2.getValue().state() == SnapshotsInProgress.ShardState.SUCCESS
)
.count()
)
.sum() == 2
).addListener(l.map(v -> null));
})
// delete the target index
.andThen(l -> client.admin().indices().delete(new DeleteIndexRequest(indexToDelete), l.map(acknowledgedResponse -> null)))
// wait for snapshot-1 to complete
.andThen(l -> {
sequencer.releaseBlock("snapshot-1", "index-1");
ClusterServiceUtils.addTemporaryStateListener(
masterClusterService,
cs -> SnapshotsInProgress.get(cs).asStream().noneMatch(e -> e.snapshot().getSnapshotId().getName().equals("snapshot-1"))
).addListener(l.map(v -> null));
})
// wait for all the other snapshots to complete
.andThen(l -> {
// Clone is yet to be finalized
assertTrue(SnapshotsInProgress.get(masterClusterService.state()).asStream().anyMatch(SnapshotsInProgress.Entry::isClone));
for (int i = 0; i < snapshotCount; i++) {
sequencer.releaseBlock("snapshot-" + i, indexToDelete);
sequencer.releaseBlock("snapshot-" + i, "index-" + i);
}
ClusterServiceUtils.addTemporaryStateListener(masterClusterService, cs -> SnapshotsInProgress.get(cs).isEmpty())
.addListener(l.map(v -> null));
})
.andThen(l -> {
final var snapshotNames = Stream.concat(
Stream.of("clone"),
IntStream.range(0, snapshotCount).mapToObj(i -> "snapshot-" + i)
).toArray(String[]::new);
client.admin()
.cluster()
.prepareGetSnapshots(TEST_REQUEST_TIMEOUT, repoName)
.setSnapshots(snapshotNames)
.execute(ActionTestUtils.assertNoFailureListener(getSnapshotsResponse -> {
for (final var snapshot : getSnapshotsResponse.getSnapshots()) {
assertThat(snapshot.state(), is(SnapshotState.SUCCESS));
final String snapshotName = snapshot.snapshot().getSnapshotId().getName();
if ("clone".equals(snapshotName)) {
// Clone is not affected by index deletion
assertThat(snapshot.indices(), containsInAnyOrder("index-0", indexToDelete));
} else {
// Does not contain the deleted index in the snapshot
assertThat(snapshot.indices(), contains("index-" + snapshotName.charAt(snapshotName.length() - 1)));
}
}
l.onResponse(null);
}));
});
deterministicTaskQueue.runAllRunnableTasks();
assertTrue(
"executed all runnable tasks but test steps are still incomplete: "
+ Strings.toString(SnapshotsInProgress.get(masterClusterService.state()), true, true),
testListener.isDone()
);
safeAwait(testListener); // shouldn't throw
assertTrue(cloneFuture.isDone());
}
@TestLogging(reason = "testing logging at INFO level", value = "org.elasticsearch.snapshots.SnapshotsService:INFO")
public void testFullSnapshotUnassignedShards() {
setupTestCluster(1, 0); // no data nodes, we want unassigned shards
final var indices = IntStream.range(0, between(1, 4)).mapToObj(i -> "index-" + i).sorted().toList();
final var repoName = "repo";
var testListener = SubscribableListener
// Create the repo and indices
.<Void>newForked(stepListener -> {
try (var listeners = new RefCountingListener(stepListener)) {
client().admin()
.cluster()
.preparePutRepository(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, repoName)
.setType(FsRepository.TYPE)
.setSettings(Settings.builder().put("location", randomAlphaOfLength(10)))
.execute(listeners.acquire(createRepoResponse -> {}));
for (final var index : indices) {
deterministicTaskQueue.scheduleNow(
// wrapped in another scheduleNow() to randomize creation order
ActionRunnable.<CreateIndexResponse>wrap(
listeners.acquire(createIndexResponse -> {}),
l -> client().admin()
.indices()
.create(
new CreateIndexRequest(index).waitForActiveShards(ActiveShardCount.NONE)
.settings(defaultIndexSettings(1)),
l
)
)
);
}
}
})
// Take the snapshot to check the reaction to having unassigned shards
.<Void>andThen(
l -> client().admin()
.cluster()
.prepareCreateSnapshot(TEST_REQUEST_TIMEOUT, repoName, randomIdentifier())
.setWaitForCompletion(randomBoolean())
.execute(new ActionListener<>() {
@Override
public void onResponse(CreateSnapshotResponse createSnapshotResponse) {
fail("snapshot should not have started");
}
@Override
public void onFailure(Exception e) {
assertThat(
asInstanceOf(SnapshotException.class, e).getMessage(),
allOf(
containsString("the following indices have unassigned primary shards"),
containsString("unless [partial] is set to [true]"),
containsString(indices.toString() /* NB sorted */),
containsString(ReferenceDocs.UNASSIGNED_SHARDS.toString())
)
);
l.onResponse(null);
}
})
);
MockLog.assertThatLogger(() -> {
deterministicTaskQueue.runAllRunnableTasks();
assertTrue("executed all runnable tasks but test steps are still incomplete", testListener.isDone());
safeAwait(testListener); // shouldn't throw
},
SnapshotsServiceUtils.class,
new MockLog.SeenEventExpectation(
"INFO log",
SnapshotsServiceUtils.class.getCanonicalName(),
Level.INFO,
"*failed to create snapshot*the following indices have unassigned primary shards*"
)
);
}
@TestLogging(reason = "testing logging at INFO level", value = "org.elasticsearch.snapshots.SnapshotsService:INFO")
public void testSnapshotNameAlreadyInUseExceptionLogging() {
setupTestCluster(1, 1);
final var repoName = "repo";
final var snapshotName = "test-snapshot";
final var testListener = createRepoAndIndex(repoName, "index", between(1, 2))
// take snapshot once
.<CreateSnapshotResponse>andThen(
l -> client().admin()
.cluster()
.prepareCreateSnapshot(TEST_REQUEST_TIMEOUT, repoName, snapshotName)
.setWaitForCompletion(true)
.execute(l)
)
// take snapshot again
.<CreateSnapshotResponse>andThen(
l -> client().admin()
.cluster()
.prepareCreateSnapshot(TEST_REQUEST_TIMEOUT, repoName, snapshotName)
.setWaitForCompletion(randomBoolean())
.execute(new ActionListener<>() {
@Override
public void onResponse(CreateSnapshotResponse createSnapshotResponse) {
fail("snapshot should not have started");
}
@Override
public void onFailure(Exception e) {
assertThat(ExceptionsHelper.unwrapCause(e), instanceOf(SnapshotNameAlreadyInUseException.class));
l.onResponse(null);
}
})
)
// attempt to clone snapshot
.<AcknowledgedResponse>andThen(
l -> client().admin()
.cluster()
.prepareCloneSnapshot(TEST_REQUEST_TIMEOUT, repoName, snapshotName, snapshotName)
.setIndices("*")
.execute(new ActionListener<>() {
@Override
public void onResponse(AcknowledgedResponse acknowledgedResponse) {
fail("snapshot should not have started");
}
@Override
public void onFailure(Exception e) {
assertThat(ExceptionsHelper.unwrapCause(e), instanceOf(SnapshotNameAlreadyInUseException.class));
l.onResponse(null);
}
})
);
final var expectedMessage = Strings.format("Invalid snapshot name [%s], snapshot with the same name already exists", snapshotName);
MockLog.assertThatLogger(() -> {
deterministicTaskQueue.runAllRunnableTasks();
assertTrue("executed all runnable tasks but test steps are still incomplete", testListener.isDone());
safeAwait(testListener); // shouldn't throw
},
SnapshotsServiceUtils.class,
new MockLog.SeenEventExpectation(
"INFO log",
SnapshotsServiceUtils.class.getCanonicalName(),
Level.INFO,
Strings.format("*failed to create snapshot*%s", expectedMessage)
),
new MockLog.SeenEventExpectation(
"INFO log",
SnapshotsServiceUtils.class.getCanonicalName(),
Level.INFO,
Strings.format("*failed to clone snapshot*%s", expectedMessage)
)
);
}
@TestLogging(reason = "testing logging at INFO level", value = "org.elasticsearch.snapshots.SnapshotsService:INFO")
public void testIndexNotFoundExceptionLogging() {
setupTestCluster(1, 0); // no need for data nodes here
final var repoName = "repo";
final var indexName = "does-not-exist";
final var testListener = SubscribableListener
// create repo
.<AcknowledgedResponse>newForked(
l -> client().admin()
.cluster()
.preparePutRepository(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, repoName)
.setType(FsRepository.TYPE)
.setSettings(Settings.builder().put("location", randomAlphaOfLength(10)))
.execute(l)
)
// take snapshot of index that does not exist
.<CreateSnapshotResponse>andThen(
l -> client().admin()
.cluster()
.prepareCreateSnapshot(TEST_REQUEST_TIMEOUT, repoName, randomIdentifier())
.setIndices(indexName)
.setWaitForCompletion(randomBoolean())
.execute(new ActionListener<>() {
@Override
public void onResponse(CreateSnapshotResponse createSnapshotResponse) {
fail("snapshot should not have started");
}
@Override
public void onFailure(Exception e) {
assertThat(ExceptionsHelper.unwrapCause(e), instanceOf(IndexNotFoundException.class));
l.onResponse(null);
}
})
);
MockLog.assertThatLogger(() -> {
deterministicTaskQueue.runAllRunnableTasks();
assertTrue("executed all runnable tasks but test steps are still incomplete", testListener.isDone());
safeAwait(testListener); // shouldn't throw
},
SnapshotsServiceUtils.class,
new MockLog.SeenEventExpectation(
"INFO log",
SnapshotsServiceUtils.class.getCanonicalName(),
Level.INFO,
Strings.format("failed to create snapshot: no such index [%s]", indexName)
)
);
}
@TestLogging(reason = "testing logging at INFO level", value = "org.elasticsearch.snapshots.SnapshotsService:INFO")
public void testIllegalArgumentExceptionLogging() {
setupTestCluster(1, 0); // no need for data nodes here
final var repoName = "repo";
final var testListener = SubscribableListener
// create repo
.<AcknowledgedResponse>newForked(
l -> client().admin()
.cluster()
.preparePutRepository(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, repoName)
.setType(FsRepository.TYPE)
.setSettings(Settings.builder().put("location", randomAlphaOfLength(10)))
.execute(l)
)
// attempt to take snapshot with illegal config ('none' is allowed as a feature state iff it's the only one in the list)
.<CreateSnapshotResponse>andThen(
l -> client().admin()
.cluster()
.prepareCreateSnapshot(TEST_REQUEST_TIMEOUT, repoName, randomIdentifier())
.setFeatureStates("none", "none")
.setWaitForCompletion(randomBoolean())
.execute(new ActionListener<>() {
@Override
public void onResponse(CreateSnapshotResponse createSnapshotResponse) {
fail("snapshot should not have started");
}
@Override
public void onFailure(Exception e) {
assertThat(ExceptionsHelper.unwrapCause(e), instanceOf(IllegalArgumentException.class));
l.onResponse(null);
}
})
);
MockLog.assertThatLogger(() -> {
deterministicTaskQueue.runAllRunnableTasks();
assertTrue("executed all runnable tasks but test steps are still incomplete", testListener.isDone());
safeAwait(testListener); // shouldn't throw
},
SnapshotsServiceUtils.class,
new MockLog.SeenEventExpectation(
"INFO log",
SnapshotsServiceUtils.class.getCanonicalName(),
Level.INFO,
Strings.format("*failed to create snapshot*other feature states were requested: [none, none]", "")
)
);
}
private RepositoryData getRepositoryData(Repository repository) {
final PlainActionFuture<RepositoryData> res = new PlainActionFuture<>();
repository.getRepositoryData(deterministicTaskQueue::scheduleNow, res);
deterministicTaskQueue.runAllRunnableTasks();
assertTrue(res.isDone());
return res.actionGet();
}
private SubscribableListener<CreateIndexResponse> createRepoAndIndex(String repoName, String index, int shards) {
final SubscribableListener<AcknowledgedResponse> createRepositoryListener = new SubscribableListener<>();
client().admin()
.cluster()
.preparePutRepository(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, repoName)
.setType(FsRepository.TYPE)
.setSettings(Settings.builder().put("location", randomAlphaOfLength(10)))
.execute(createRepositoryListener);
final SubscribableListener<CreateIndexResponse> createIndexResponseStepListener = new SubscribableListener<>();
continueOrDie(
createRepositoryListener,
acknowledgedResponse -> client().admin()
.indices()
.create(
new CreateIndexRequest(index).waitForActiveShards(ActiveShardCount.ALL).settings(defaultIndexSettings(shards)),
createIndexResponseStepListener
)
);
return createIndexResponseStepListener;
}
private void clearDisruptionsAndAwaitSync() {
testClusterNodes.clearNetworkDisruptions();
stabilize();
}
private void disconnectOrRestartDataNode() {
if (randomBoolean()) {
disconnectRandomDataNode();
} else {
testClusterNodes.randomDataNode().ifPresent(TestClusterNodes.TestClusterNode::restart);
}
}
private void disconnectOrRestartMasterNode() {
testClusterNodes.randomMasterNode().ifPresent(masterNode -> {
if (randomBoolean()) {
testClusterNodes.disconnectNode(masterNode);
} else {
masterNode.restart();
}
});
}
private void disconnectRandomDataNode() {
testClusterNodes.randomDataNode().ifPresent(n -> testClusterNodes.disconnectNode(n));
}
private void startCluster() {
final ClusterState initialClusterState = new ClusterState.Builder(ClusterName.DEFAULT).nodes(testClusterNodes.discoveryNodes())
.build();
testClusterNodes.nodes.values().forEach(testClusterNode -> testClusterNode.start(initialClusterState));
deterministicTaskQueue.advanceTime();
deterministicTaskQueue.runAllRunnableTasks();
final VotingConfiguration votingConfiguration = new VotingConfiguration(
testClusterNodes.nodes.values()
.stream()
.map(n -> n.node)
.filter(DiscoveryNode::isMasterNode)
.map(DiscoveryNode::getId)
.collect(Collectors.toSet())
);
testClusterNodes.nodes.values()
.stream()
.filter(n -> n.node.isMasterNode())
.forEach(testClusterNode -> testClusterNode.coordinator.setInitialConfiguration(votingConfiguration));
// Connect all nodes to each other
testClusterNodes.nodes.values()
.forEach(
node -> testClusterNodes.nodes.values()
.forEach(
n -> n.transportService.connectToNode(
node.node,
ActionTestUtils.assertNoFailureListener(c -> logger.info("--> Connected [{}] to [{}]", n.node, node.node))
)
)
);
stabilize();
}
private void stabilize() {
final long endTime = deterministicTaskQueue.getCurrentTimeMillis() + AbstractCoordinatorTestCase.DEFAULT_STABILISATION_TIME;
while (deterministicTaskQueue.getCurrentTimeMillis() < endTime) {
deterministicTaskQueue.advanceTime();
deterministicTaskQueue.runAllRunnableTasks();
}
runUntil(() -> {
final Collection<ClusterState> clusterStates = testClusterNodes.nodes.values()
.stream()
.map(node -> node.clusterService.state())
.toList();
final Set<String> masterNodeIds = clusterStates.stream()
.map(clusterState -> clusterState.nodes().getMasterNodeId())
.collect(Collectors.toSet());
final Set<Long> terms = clusterStates.stream().map(ClusterState::term).collect(Collectors.toSet());
final List<Long> versions = clusterStates.stream().map(ClusterState::version).distinct().toList();
return versions.size() == 1 && masterNodeIds.size() == 1 && masterNodeIds.contains(null) == false && terms.size() == 1;
}, TimeUnit.MINUTES.toMillis(1L));
}
private void runUntil(Supplier<Boolean> fulfilled, long timeout) {
final long start = deterministicTaskQueue.getCurrentTimeMillis();
while (timeout > deterministicTaskQueue.getCurrentTimeMillis() - start) {
if (fulfilled.get()) {
return;
}
deterministicTaskQueue.runAllRunnableTasks();
deterministicTaskQueue.advanceTime();
}
fail("Condition wasn't fulfilled.");
}
private void setupTestCluster(int masterNodes, int dataNodes) {
setupTestCluster(masterNodes, dataNodes, ignored -> TransportService.NOOP_TRANSPORT_INTERCEPTOR);
}
private void setupTestCluster(
int masterNodes,
int dataNodes,
TestClusterNodes.TransportInterceptorFactory transportInterceptorFactory
) {
testClusterNodes = new TestClusterNodes(masterNodes, dataNodes, transportInterceptorFactory);
startCluster();
}
private void scheduleSoon(Runnable runnable) {
deterministicTaskQueue.scheduleAt(deterministicTaskQueue.getCurrentTimeMillis() + randomLongBetween(0, 100L), runnable);
}
private void scheduleNow(Runnable runnable) {
deterministicTaskQueue.scheduleNow(runnable);
}
private static Settings defaultIndexSettings(int shards) {
// TODO: randomize replica count settings once recovery operations aren't blocking anymore
return indexSettings(shards, 0).build();
}
private static <T> void continueOrDie(SubscribableListener<T> listener, CheckedConsumer<T, Exception> onResponse) {
listener.addListener(ActionTestUtils.assertNoFailureListener(onResponse));
}
public NodeClient client() {
// Select from sorted list of nodes
final List<TestClusterNodes.TestClusterNode> nodes = testClusterNodes.nodes.values()
.stream()
.filter(n -> testClusterNodes.disconnectedNodes.contains(n.node.getName()) == false)
.sorted(Comparator.comparing(n -> n.node.getName()))
.toList();
if (nodes.isEmpty()) {
throw new AssertionError("No nodes available");
}
return randomFrom(nodes).client;
}
/**
* Create a {@link Environment} with random path.home and path.repo
**/
private Environment createEnvironment(String nodeName) {
return TestEnvironment.newEnvironment(
Settings.builder()
.put(NODE_NAME_SETTING.getKey(), nodeName)
.put(PATH_HOME_SETTING.getKey(), tempDir.resolve(nodeName).toAbsolutePath())
.put(Environment.PATH_REPO_SETTING.getKey(), tempDir.resolve("repo").toAbsolutePath())
.putList(
ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING.getKey(),
ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING.get(Settings.EMPTY)
)
.put(MappingUpdatedAction.INDICES_MAX_IN_FLIGHT_UPDATES_SETTING.getKey(), 1000) // o.w. some tests might block
.build()
);
}
private static ClusterState stateForNode(ClusterState state, DiscoveryNode node) {
// Remove and add back local node to update ephemeral id on restarts
return ClusterState.builder(state)
.nodes(DiscoveryNodes.builder(state.nodes()).remove(node.getId()).add(node).localNodeId(node.getId()))
.build();
}
private final | ShardSnapshotUpdatesSequencer |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/iterable/IterableAssert_extractingResultOf_with_SortedSet_Test.java | {
"start": 3228,
"end": 7195
} | class ____.class. Make sure public method exists and accepts no arguments!");
}
@Test
void should_use_method_name_as_description_when_extracting_result_of_method_list() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(jedis).extractingResultOf("age")
.isEmpty())
.withMessageContaining("[Extracted: result of age()]");
}
@Test
void should_use_method_name_as_description_when_extracting_typed_result_of_method_list() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(jedis).extractingResultOf("age",
Integer.class)
.isEmpty())
.withMessageContaining("[Extracted: result of age()]");
}
@Test
void extractingResultOf_should_keep_assertion_state() {
// WHEN
// not all comparators are used but we want to test that they are passed correctly after extracting
AbstractListAssert<?, ?, ?, ?> assertion = assertThat(jedis).as("test description")
.withFailMessage("error message")
.withRepresentation(UNICODE_REPRESENTATION)
.extractingResultOf("toString")
.usingComparatorForType(CaseInsensitiveStringComparator.INSTANCE,
String.class)
.containsOnly("YODA", "darth vader");
// THEN
assertThat(assertion.descriptionText()).isEqualTo("test description");
assertThat(assertion.info.representation()).isEqualTo(UNICODE_REPRESENTATION);
assertThat(assertion.info.overridingErrorMessage()).isEqualTo("error message");
assertThat(comparatorsByTypeOf(assertion).getComparatorForType(String.class)).isSameAs(CaseInsensitiveStringComparator.INSTANCE);
}
@Test
void strongly_typed_extractingResultOf_should_keep_assertion_state() {
// WHEN
// not all comparators are used but we want to test that they are passed correctly after extracting
AbstractListAssert<?, ?, ?, ?> assertion = assertThat(jedis).as("test description")
.withFailMessage("error message")
.withRepresentation(UNICODE_REPRESENTATION)
.extractingResultOf("toString", String.class)
.usingComparatorForType(CaseInsensitiveStringComparator.INSTANCE,
String.class)
.containsOnly("YODA", "darth vader");
// THEN
assertThat(assertion.descriptionText()).isEqualTo("test description");
assertThat(assertion.info.representation()).isEqualTo(UNICODE_REPRESENTATION);
assertThat(assertion.info.overridingErrorMessage()).isEqualTo("error message");
assertThat(comparatorsByTypeOf(assertion).getComparatorForType(String.class)).isSameAs(CaseInsensitiveStringComparator.INSTANCE);
}
private static SortedSet<FluentJedi> newSortedSet(FluentJedi... jedis) {
TreeSet<FluentJedi> jediSortedSet = new TreeSet<>(comparing(FluentJedi::age));
for (FluentJedi cartoonCharacter : jedis) {
jediSortedSet.add(cartoonCharacter);
}
return jediSortedSet;
}
}
| FluentJedi |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/cfg/CacheProvider.java | {
"start": 553,
"end": 1598
} | interface ____
extends java.io.Serializable
{
/**
* Method to provide a {@link LookupCache} instance for constructing {@link DeserializerCache}.
*
* @return {@link LookupCache} instance for constructing {@link DeserializerCache}.
*/
LookupCache<JavaType, ValueDeserializer<Object>> forDeserializerCache(DeserializationConfig config);
/**
* Method to provide a {@link LookupCache} instance for constructing {@link tools.jackson.databind.ser.SerializerCache}.
*
* @return {@link LookupCache} instance for constructing {@link tools.jackson.databind.ser.SerializerCache}.
*/
LookupCache<TypeKey, ValueSerializer<Object>> forSerializerCache(SerializationConfig config);
/**
* Method to provide a {@link LookupCache} instance for constructing {@link tools.jackson.databind.type.TypeFactory}.
*
* @return {@link LookupCache} instance for constructing {@link tools.jackson.databind.type.TypeFactory}.
*/
LookupCache<Object, JavaType> forTypeFactory();
}
| CacheProvider |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/javadoc/InvalidSnippetTest.java | {
"start": 1297,
"end": 1628
} | interface ____ {}
""")
.doTest();
}
@Test
public void snippetWithColon() {
helper
.addSourceLines(
"Test.java",
"""
/**
*
*
* {@snippet :
* I have a colon
* }
*/
| Test |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/EntryPointAssertions_catchException_Test.java | {
"start": 942,
"end": 1664
} | class ____ extends EntryPointAssertionsBaseTest {
private static final Exception EXCEPTION = new Exception();
@ParameterizedTest
@MethodSource("catchExceptions")
void should_catch_Exception(Function<ThrowingCallable, Exception> catchException) {
// GIVEN
ThrowingCallable throwingCallable = () -> {
throw EXCEPTION;
};
// WHEN
Exception throwable = catchException.apply(throwingCallable);
// THEN
then(throwable).isSameAs(EXCEPTION);
}
private static Stream<Function<ThrowingCallable, Exception>> catchExceptions() {
return Stream.of(Assertions::catchException, BDDAssertions::catchException, withAssertions::catchException);
}
}
| EntryPointAssertions_catchException_Test |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java | {
"start": 2412,
"end": 10838
} | class ____ extends CredentialProvider {
public static final Logger LOG = LoggerFactory.getLogger(
AbstractJavaKeyStoreProvider.class);
public static final String CREDENTIAL_PASSWORD_ENV_VAR =
"HADOOP_CREDSTORE_PASSWORD";
public static final String CREDENTIAL_PASSWORD_FILE_KEY =
CommonConfigurationKeysPublic.
HADOOP_SECURITY_CREDENTIAL_PASSWORD_FILE_KEY;
public static final String CREDENTIAL_PASSWORD_DEFAULT = "none";
private Path path;
private final URI uri;
private KeyStore keyStore;
private char[] password = null;
private boolean changed = false;
private Lock readLock;
private Lock writeLock;
private final Configuration conf;
protected AbstractJavaKeyStoreProvider(URI uri, Configuration conf)
throws IOException {
this.uri = uri;
this.conf = conf;
initFileSystem(uri);
locateKeystore();
ReadWriteLock lock = new ReentrantReadWriteLock(true);
readLock = lock.readLock();
writeLock = lock.writeLock();
}
protected Configuration getConf() {
return conf;
}
public Path getPath() {
return path;
}
public void setPath(Path p) {
this.path = p;
}
public char[] getPassword() {
return password;
}
public void setPassword(char[] pass) {
this.password = pass;
}
public boolean isChanged() {
return changed;
}
public void setChanged(boolean chg) {
this.changed = chg;
}
public Lock getReadLock() {
return readLock;
}
public void setReadLock(Lock rl) {
this.readLock = rl;
}
public Lock getWriteLock() {
return writeLock;
}
public void setWriteLock(Lock wl) {
this.writeLock = wl;
}
public URI getUri() {
return uri;
}
public KeyStore getKeyStore() {
return keyStore;
}
protected final String getPathAsString() {
return getPath().toString();
}
protected abstract String getSchemeName();
protected abstract String getKeyStoreType();
protected abstract String getAlgorithm();
protected abstract OutputStream getOutputStreamForKeystore()
throws IOException;
protected abstract boolean keystoreExists() throws IOException;
protected abstract InputStream getInputStreamForFile() throws IOException;
protected abstract void createPermissions(String perms) throws IOException;
protected abstract void stashOriginalFilePermissions() throws IOException;
protected void initFileSystem(URI keystoreUri)
throws IOException {
path = ProviderUtils.unnestUri(keystoreUri);
if (LOG.isDebugEnabled()) {
LOG.debug("backing jks path initialized to " + path);
}
}
@Override
public CredentialEntry getCredentialEntry(String alias)
throws IOException {
readLock.lock();
try {
SecretKeySpec key = null;
try {
if (!keyStore.containsAlias(alias)) {
return null;
}
key = (SecretKeySpec) keyStore.getKey(alias, password);
} catch (KeyStoreException e) {
throw new IOException("Can't get credential " + alias + " from "
+ getPathAsString(), e);
} catch (NoSuchAlgorithmException e) {
throw new IOException("Can't get algorithm for credential " + alias
+ " from " + getPathAsString(), e);
} catch (UnrecoverableKeyException e) {
throw new IOException("Can't recover credential " + alias + " from "
+ getPathAsString(), e);
}
return new CredentialEntry(alias, bytesToChars(key.getEncoded()));
} finally {
readLock.unlock();
}
}
public static char[] bytesToChars(byte[] bytes) throws IOException {
String pass;
pass = new String(bytes, StandardCharsets.UTF_8);
return pass.toCharArray();
}
@Override
public List<String> getAliases() throws IOException {
readLock.lock();
try {
ArrayList<String> list = new ArrayList<String>();
String alias = null;
try {
Enumeration<String> e = keyStore.aliases();
while (e.hasMoreElements()) {
alias = e.nextElement();
list.add(alias);
}
} catch (KeyStoreException e) {
throw new IOException("Can't get alias " + alias + " from "
+ getPathAsString(), e);
}
return list;
} finally {
readLock.unlock();
}
}
@Override
public CredentialEntry createCredentialEntry(String alias, char[] credential)
throws IOException {
writeLock.lock();
try {
if (keyStore.containsAlias(alias)) {
throw new IOException("Credential " + alias + " already exists in "
+ this);
}
return innerSetCredential(alias, credential);
} catch (KeyStoreException e) {
throw new IOException("Problem looking up credential " + alias + " in "
+ this, e);
} finally {
writeLock.unlock();
}
}
@Override
public void deleteCredentialEntry(String name) throws IOException {
writeLock.lock();
try {
try {
if (keyStore.containsAlias(name)) {
keyStore.deleteEntry(name);
} else {
throw new IOException("Credential " + name + " does not exist in "
+ this);
}
} catch (KeyStoreException e) {
throw new IOException("Problem removing " + name + " from " + this, e);
}
changed = true;
} finally {
writeLock.unlock();
}
}
CredentialEntry innerSetCredential(String alias, char[] material)
throws IOException {
writeLock.lock();
try {
keyStore.setKeyEntry(alias,
new SecretKeySpec(new String(material).getBytes(StandardCharsets.UTF_8),
getAlgorithm()), password, null);
} catch (KeyStoreException e) {
throw new IOException("Can't store credential " + alias + " in " + this,
e);
} finally {
writeLock.unlock();
}
changed = true;
return new CredentialEntry(alias, material);
}
@Override
public void flush() throws IOException {
writeLock.lock();
try {
if (!changed) {
LOG.debug("Keystore hasn't changed, returning.");
return;
}
LOG.debug("Writing out keystore.");
try (OutputStream out = getOutputStreamForKeystore()) {
keyStore.store(out, password);
} catch (KeyStoreException e) {
throw new IOException("Can't store keystore " + this, e);
} catch (NoSuchAlgorithmException e) {
throw new IOException("No such algorithm storing keystore " + this, e);
} catch (CertificateException e) {
throw new IOException("Certificate exception storing keystore " + this,
e);
}
changed = false;
} finally {
writeLock.unlock();
}
}
/**
* Open up and initialize the keyStore.
*
* @throws IOException If there is a problem reading the password file
* or a problem reading the keystore.
*/
private void locateKeystore() throws IOException {
try {
password = ProviderUtils.locatePassword(CREDENTIAL_PASSWORD_ENV_VAR,
conf.get(CREDENTIAL_PASSWORD_FILE_KEY));
if (password == null) {
password = CREDENTIAL_PASSWORD_DEFAULT.toCharArray();
}
KeyStore ks;
ks = KeyStore.getInstance(getKeyStoreType());
if (keystoreExists()) {
stashOriginalFilePermissions();
try (InputStream in = getInputStreamForFile()) {
ks.load(in, password);
}
} else {
createPermissions("600");
// required to create an empty keystore. *sigh*
ks.load(null, password);
}
keyStore = ks;
} catch (KeyStoreException e) {
throw new IOException("Can't create keystore", e);
} catch (GeneralSecurityException e) {
throw new IOException("Can't load keystore " + getPathAsString(), e);
}
}
@Override
public boolean needsPassword() throws IOException {
return (null == ProviderUtils.locatePassword(CREDENTIAL_PASSWORD_ENV_VAR,
conf.get(CREDENTIAL_PASSWORD_FILE_KEY)));
}
@Override
public String noPasswordWarning() {
return ProviderUtils.noPasswordWarning(CREDENTIAL_PASSWORD_ENV_VAR,
CREDENTIAL_PASSWORD_FILE_KEY);
}
@Override
public String noPasswordError() {
return ProviderUtils.noPasswordError(CREDENTIAL_PASSWORD_ENV_VAR,
CREDENTIAL_PASSWORD_FILE_KEY);
}
@Override
public String toString() {
return uri.toString();
}
}
| AbstractJavaKeyStoreProvider |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AUtils.java | {
"start": 7089,
"end": 8401
} | class ____
* {@link SdkException} passed in, and any status codes included
* in the operation. That is: HTTP error codes are examined and can be
* used to build a more specific response.
* @param operation operation
* @param path path operated on (may be null)
* @param exception amazon exception raised
* @return an IOE which wraps the caught exception.
*/
@SuppressWarnings("ThrowableInstanceNeverThrown")
public static IOException translateException(@Nullable String operation,
@Nullable String path,
SdkException exception) {
String message = String.format("%s%s: %s",
operation,
StringUtils.isNotEmpty(path)? (" on " + path) : "",
exception);
if (path == null || path.isEmpty()) {
// handle null path by giving it a stub value.
// not ideal/informative, but ensures that the path is never null in
// exceptions constructed.
path = "/";
}
exception = maybeProcessEncryptionClientException(exception);
if (!(exception instanceof AwsServiceException)) {
// exceptions raised client-side: connectivity, auth, network problems...
Exception innerCause = containsInterruptedException(exception);
if (innerCause != null) {
// interrupted IO, or a socket exception underneath that | of |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/service/ServiceAccountTokenStore.java | {
"start": 785,
"end": 1779
} | class ____ {
private final boolean success;
private final TokenSource tokenSource;
private StoreAuthenticationResult(TokenSource tokenSource, boolean success) {
this.success = success;
this.tokenSource = tokenSource;
}
public static StoreAuthenticationResult successful(TokenSource tokenSource) {
return new StoreAuthenticationResult(tokenSource, true);
}
public static StoreAuthenticationResult failed(TokenSource tokenSource) {
return new StoreAuthenticationResult(tokenSource, false);
}
public static StoreAuthenticationResult fromBooleanResult(TokenSource tokenSource, boolean result) {
return result ? successful(tokenSource) : failed(tokenSource);
}
public boolean isSuccess() {
return success;
}
public TokenSource getTokenSource() {
return tokenSource;
}
}
}
| StoreAuthenticationResult |
java | apache__kafka | clients/src/main/java/org/apache/kafka/clients/producer/PreparedTxnState.java | {
"start": 1111,
"end": 4539
} | class ____ {
private final long producerId;
private final short epoch;
/**
* Creates a new empty PreparedTxnState
*/
public PreparedTxnState() {
this.producerId = RecordBatch.NO_PRODUCER_ID;
this.epoch = RecordBatch.NO_PRODUCER_EPOCH;
}
/**
* Creates a new PreparedTxnState from a serialized string representation
*
* @param serializedState The serialized string to deserialize.
* @throws IllegalArgumentException if the serialized string is not in the expected format
*/
public PreparedTxnState(String serializedState) {
if (serializedState == null || serializedState.isEmpty()) {
this.producerId = RecordBatch.NO_PRODUCER_ID;
this.epoch = RecordBatch.NO_PRODUCER_EPOCH;
return;
}
try {
String[] parts = serializedState.split(":");
if (parts.length != 2) {
throw new IllegalArgumentException("Invalid serialized transaction state format: " + serializedState);
}
this.producerId = Long.parseLong(parts[0]);
this.epoch = Short.parseShort(parts[1]);
// Validate the producerId and epoch values.
if (!(this.producerId >= 0 && this.epoch >= 0)) {
throw new IllegalArgumentException("Invalid producer ID and epoch values: " +
producerId + ":" + epoch + ". Both must be >= 0");
}
} catch (NumberFormatException e) {
throw new IllegalArgumentException("Invalid serialized transaction state format: " + serializedState, e);
}
}
/**
* Creates a new PreparedTxnState with the given producer ID and epoch
*
* @param producerId The producer ID
* @param epoch The producer epoch
*/
PreparedTxnState(long producerId, short epoch) {
this.producerId = producerId;
this.epoch = epoch;
}
public long producerId() {
return producerId;
}
public short epoch() {
return epoch;
}
/**
* Checks if this preparedTxnState represents an initialized transaction with a valid producer ID
* that is not -1 (the uninitialized value).
*
* @return true if the state has an initialized transaction, false otherwise.
*/
public boolean hasTransaction() {
return producerId != RecordBatch.NO_PRODUCER_ID;
}
/**
* Returns a serialized string representation of this transaction state.
* The format is "producerId:epoch" for an initialized state, or an empty string
* for an uninitialized state (where producerId and epoch are both -1).
*
* @return a serialized string representation
*/
@Override
public String toString() {
if (!hasTransaction()) {
return "";
}
return producerId + ":" + epoch;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
PreparedTxnState that = (PreparedTxnState) o;
return producerId == that.producerId && epoch == that.epoch;
}
@Override
public int hashCode() {
int result = 31;
result = 31 * result + Long.hashCode(producerId);
result = 31 * result + (int) epoch;
return result;
}
}
| PreparedTxnState |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/ser/jdk/NumberSerializers.java | {
"start": 2283,
"end": 2504
} | class ____ it is not designed as
* an extension point, and as such is not part of public API. This means that
* the compatibility across minor versions is only guaranteed on minor-to-minor
* basis, and | yourself |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/time/TemporalAccessorGetChronoFieldTest.java | {
"start": 3273,
"end": 3902
} | class ____ {
private static final int value1 = MONDAY.get(DAY_OF_WEEK);
// BUG: Diagnostic contains: TemporalAccessorGetChronoField
private static final int value2 = MONDAY.get(NANO_OF_DAY);
}
""")
.doTest();
}
@Test
public void temporalAccessor_realCode() {
helper
.addSourceLines(
"TestClass.java",
"""
import static java.time.temporal.ChronoField.MICRO_OF_SECOND;
import static java.time.temporal.ChronoField.DAY_OF_WEEK;
import java.time.Instant;
public | TestClass |
java | micronaut-projects__micronaut-core | http-client-tck/src/main/java/io/micronaut/http/client/tck/tests/RawTest.java | {
"start": 9786,
"end": 10656
} | class ____ {
@RequestFilter("/filter-request-replace-response")
public HttpResponse<?> filterRequestReplaceResponse(HttpRequest<?> request, @Body String body) throws Exception {
// @Body happens to work, but this should very much be considered experimental (and will only work for the raw client)
return HttpResponse.ok("Replaced response. Request body: " + body);
}
@ResponseFilter("/filter-replace-response")
@ExecuteOn(TaskExecutors.BLOCKING)
public HttpResponse<?> filterReplaceResponse(HttpResponse<?> response) throws Exception {
try (ByteBodyHttpResponse<?> r = (ByteBodyHttpResponse<?>) response) {
return HttpResponse.ok("Replaced response. Response body: " + r.byteBody().buffer().get().toString(StandardCharsets.UTF_8));
}
}
}
}
| RawFilter |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/postgresql/PGLimitTest.java | {
"start": 153,
"end": 750
} | class ____ {
/**
* @param args
*/
public static void main(String[] args) {
DbType dbType = JdbcConstants.POSTGRESQL; // "postgresql";
// dbType = "mysql";
String sql = " select * from brandinfo where 1=1 and brandid > 100 order by brandid asc";
String sqlLimit = com.alibaba.druid.sql.PagerUtils.limit(sql, dbType,
2499, 100);
System.out.println("sqlLimit == " + sqlLimit);
String sqlCount = com.alibaba.druid.sql.PagerUtils.count(sql, dbType);
System.out.println("sqlCount == " + sqlCount);
}
}
| PGLimitTest |
java | mapstruct__mapstruct | processor/src/main/java/org/mapstruct/ap/internal/model/source/builtin/CalendarToXmlGregorianCalendar.java | {
"start": 565,
"end": 1322
} | class ____ extends AbstractToXmlGregorianCalendar {
private final Parameter parameter;
private final Set<Type> importTypes;
public CalendarToXmlGregorianCalendar(TypeFactory typeFactory) {
super( typeFactory );
this.parameter = new Parameter( "cal ", typeFactory.getType( Calendar.class ) );
this.importTypes = asSet(
parameter.getType(),
typeFactory.getType( GregorianCalendar.class )
);
}
@Override
public Set<Type> getImportTypes() {
Set<Type> result = super.getImportTypes();
result.addAll( this.importTypes );
return result;
}
@Override
public Parameter getParameter() {
return parameter;
}
}
| CalendarToXmlGregorianCalendar |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/support/Spr7816Tests.java | {
"start": 1928,
"end": 2004
} | class ____ permits Building, Entrance, Dwelling {
}
static final | DomainEntity |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/zoneddatetime/ZonedDateTimeAssert_isAfterOrEqualTo_Test.java | {
"start": 1294,
"end": 2882
} | class ____ extends AbstractZonedDateTimeAssertBaseTest {
@Override
protected ZonedDateTimeAssert invoke_api_method() {
return assertions.isAfterOrEqualTo(NOW)
.isAfterOrEqualTo(YESTERDAY.toString());
}
@Override
protected void verify_internal_effects() {
verify(comparables).assertIsAfterOrEqualTo(getInfo(assertions), getActual(assertions), NOW);
verify(comparables).assertIsAfterOrEqualTo(getInfo(assertions), getActual(assertions), YESTERDAY);
}
@Test
void should_fail_if_zonedDateTime_parameter_is_null() {
// GIVEN
ZonedDateTime otherZonedDateTime = null;
// WHEN
ThrowingCallable code = () -> assertThat(NOW).isAfterOrEqualTo(otherZonedDateTime);
// THEN
thenIllegalArgumentException().isThrownBy(code)
.withMessage("The ZonedDateTime to compare actual with should not be null");
}
@Test
void should_fail_if_zonedDateTime_as_string_parameter_is_null() {
// GIVEN
String otherZonedDateTimeAsString = null;
// WHEN
ThrowingCallable code = () -> assertThat(NOW).isAfterOrEqualTo(otherZonedDateTimeAsString);
// THEN
thenIllegalArgumentException().isThrownBy(code)
.withMessage("The String representing the ZonedDateTime to compare actual with should not be null");
}
@Test
void should_fail_if_given_string_parameter_cant_be_parsed() {
assertThatThrownBy(() -> assertions.isAfterOrEqualTo("not a ZonedDateTime")).isInstanceOf(DateTimeParseException.class);
}
}
| ZonedDateTimeAssert_isAfterOrEqualTo_Test |
java | apache__hadoop | hadoop-tools/hadoop-gcp/src/main/java/org/apache/hadoop/fs/gs/HadoopConfigurationProperty.java | {
"start": 1333,
"end": 4131
} | class ____<T> {
private static final Logger LOG = LoggerFactory.getLogger(HadoopConfigurationProperty.class);
private final String key;
private final List<String> deprecatedKeys;
private final T defaultValue;
private List<String> keyPrefixes = ImmutableList.of("");
HadoopConfigurationProperty(String key) {
this(key, null);
}
HadoopConfigurationProperty(String key, T defaultValue, String... deprecatedKeys) {
this.key = key;
this.deprecatedKeys =
deprecatedKeys == null ? ImmutableList.of() : ImmutableList.copyOf(deprecatedKeys);
this.defaultValue = defaultValue;
}
String getKey() {
return key;
}
T getDefault() {
return defaultValue;
}
T get(Configuration config, BiFunction<String, T, T> getterFn) {
String lookupKey = getLookupKey(config, key, (c, k) -> c.get(k) != null);
return logProperty(lookupKey, getterFn.apply(lookupKey, defaultValue));
}
Duration getTimeDuration(Configuration config) {
String lookupKey = getLookupKey(config, key, (c, k) -> c.get(k) != null);
String defValStr = defaultValue == null ? null : String.valueOf(defaultValue);
return logProperty(
lookupKey, Duration.ofMillis(config.getTimeDuration(lookupKey, defValStr, MILLISECONDS)));
}
HadoopConfigurationProperty<T> withPrefixes(List<String> prefixes) {
this.keyPrefixes = ImmutableList.copyOf(prefixes);
return this;
}
RedactedString getPassword(Configuration config) {
checkState(defaultValue == null || defaultValue instanceof String, "Not a string property");
String lookupKey = getLookupKey(config, key, (c, k) -> c.get(k) != null);
char[] value;
try {
value = config.getPassword(lookupKey);
} catch (IOException e) {
throw new RuntimeException(e);
}
return logProperty(
lookupKey,
RedactedString.create(value == null ? (String) defaultValue : String.valueOf(value)));
}
private String getLookupKey(Configuration config, String lookupKey,
BiFunction<Configuration, String, Boolean> checkFn) {
for (String prefix : keyPrefixes) {
String prefixedKey = prefix + lookupKey;
if (checkFn.apply(config, prefixedKey)) {
return prefixedKey;
}
for (String deprecatedKey : deprecatedKeys) {
String prefixedDeprecatedKey = prefix + deprecatedKey;
if (checkFn.apply(config, prefixedDeprecatedKey)) {
LOG.warn("Using deprecated key '{}', use '{}' key instead.", prefixedDeprecatedKey,
prefixedKey);
return prefixedDeprecatedKey;
}
}
}
return keyPrefixes.get(0) + lookupKey;
}
private static <S> S logProperty(String key, S value) {
LOG.trace("{} = {}", key, value);
return value;
}
}
| HadoopConfigurationProperty |
java | apache__kafka | connect/api/src/main/java/org/apache/kafka/connect/health/ConnectClusterState.java | {
"start": 1248,
"end": 3440
} | interface ____ {
/**
* Get the names of the connectors currently deployed in this cluster. This is a full list of connectors in the cluster gathered from
* the current configuration, which may change over time.
*
* @return collection of connector names, never {@code null}
*/
Collection<String> connectors();
/**
* Lookup the current health of a connector and its tasks. This provides the current snapshot of health by querying the underlying
* herder. A connector returned by previous invocation of {@link #connectors()} may no longer be available and could result in {@link
* org.apache.kafka.connect.errors.NotFoundException}.
*
* @param connName name of the connector
* @return the health of the connector for the connector name
* @throws org.apache.kafka.connect.errors.NotFoundException if the requested connector can't be found
*/
ConnectorHealth connectorHealth(String connName);
/**
* Lookup the current configuration of a connector. This provides the current snapshot of configuration by querying the underlying
* herder. A connector returned by previous invocation of {@link #connectors()} may no longer be available and could result in {@link
* org.apache.kafka.connect.errors.NotFoundException}.
*
* @param connName name of the connector
* @return the configuration of the connector for the connector name
* @throws org.apache.kafka.connect.errors.NotFoundException if the requested connector can't be found
* @throws java.lang.UnsupportedOperationException if the default implementation has not been overridden
*/
default Map<String, String> connectorConfig(String connName) {
throw new UnsupportedOperationException();
}
/**
* Get details about the setup of the Connect cluster.
* @return a {@link ConnectClusterDetails} object containing information about the cluster
* @throws java.lang.UnsupportedOperationException if the default implementation has not been overridden
*/
default ConnectClusterDetails clusterDetails() {
throw new UnsupportedOperationException();
}
}
| ConnectClusterState |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractContentSummary.java | {
"start": 1387,
"end": 2592
} | class ____ extends AbstractContractContentSummaryTest {
@Test
public void testGetContentSummaryDir() throws Throwable {
describe("getContentSummary on test dir with children");
S3AFileSystem fs = getFileSystem();
Path baseDir = methodPath();
// Nested folders created separately will return as separate objects in listFiles()
fs.mkdirs(new Path(baseDir, "a"));
fs.mkdirs(new Path(baseDir, "a/b"));
fs.mkdirs(new Path(baseDir, "a/b/a"));
// Will return as one object
fs.mkdirs(new Path(baseDir, "d/e/f"));
Path filePath = new Path(baseDir, "a/b/file");
touch(fs, filePath);
// look at path to see if it is a file
// it is not: so LIST
final ContentSummary summary = fs.getContentSummary(baseDir);
Assertions.assertThat(summary.getDirectoryCount()).as("Summary " + summary).isEqualTo(7);
Assertions.assertThat(summary.getFileCount()).as("Summary " + summary).isEqualTo(1);
}
@Override
protected AbstractFSContract createContract(Configuration conf) {
return new S3AContract(conf);
}
@Override
public S3AFileSystem getFileSystem() {
return (S3AFileSystem) super.getFileSystem();
}
}
| ITestS3AContractContentSummary |
java | apache__camel | dsl/camel-jbang/camel-jbang-core/src/main/java/org/apache/camel/dsl/jbang/core/commands/catalog/CatalogDevConsole.java | {
"start": 1238,
"end": 2264
} | class ____ extends CatalogBaseCommand {
public CatalogDevConsole(CamelJBangMain main) {
super(main);
}
@Override
List<Row> collectRows() {
List<Row> rows = new ArrayList<>();
for (String name : catalog.findDevConsoleNames()) {
DevConsoleModel model = catalog.devConsoleModel(name);
if (model != null) {
Row row = new Row();
row.name = model.getName();
row.title = model.getTitle();
row.level = model.getSupportLevel().name();
row.since = fixQuarkusSince(model.getFirstVersionShort());
row.description = model.getDescription();
row.label = model.getLabel() != null ? model.getLabel() : "";
row.deprecated = model.isDeprecated();
row.nativeSupported = model.isNativeSupported();
row.gav = getGAV(model);
rows.add(row);
}
}
return rows;
}
}
| CatalogDevConsole |
java | micronaut-projects__micronaut-core | inject/src/main/java/io/micronaut/inject/beans/AbstractInitializableBeanIntrospection.java | {
"start": 46468,
"end": 49227
} | class ____<P> implements BeanMethod<B, P>, ExecutableMethod<B, P> {
private final BeanMethodRef<P> ref;
private BeanMethodImpl(BeanMethodRef<P> ref) {
this.ref = ref;
}
@NonNull
@Override
public BeanIntrospection<B> getDeclaringBean() {
return AbstractInitializableBeanIntrospection.this;
}
@Override
public @NonNull
ReturnType<P> getReturnType() {
//noinspection unchecked
return new ReturnType() {
@Override
public Class<P> getType() {
return ref.returnType.getType();
}
@Override
@NonNull
public Argument<P> asArgument() {
return ref.returnType;
}
@Override
public Map<String, Argument<?>> getTypeVariables() {
return ref.returnType.getTypeVariables();
}
@NonNull
@Override
public AnnotationMetadata getAnnotationMetadata() {
return EvaluatedAnnotationMetadata.wrapIfNecessary(ref.returnType.getAnnotationMetadata());
}
};
}
@NonNull
@Override
public AnnotationMetadata getAnnotationMetadata() {
return ref.annotationMetadata == null ? AnnotationMetadata.EMPTY_METADATA : ref.annotationMetadata;
}
@NonNull
@Override
public String getName() {
return ref.name;
}
@Override
public Argument<?>[] getArguments() {
return ref.arguments == null ? Argument.ZERO_ARGUMENTS : ref.arguments;
}
@Override
public P invoke(@NonNull B instance, Object... arguments) {
return dispatch(ref.methodIndex, instance, arguments);
}
@Override
public Method getTargetMethod() {
if (ClassUtils.REFLECTION_LOGGER.isWarnEnabled()) {
ClassUtils.REFLECTION_LOGGER.warn("Using getTargetMethod for method {} on type {} requires the use of reflection. GraalVM configuration necessary", getName(), getDeclaringType());
}
return getTargetMethodByIndex(ref.methodIndex);
}
@Override
public Class<B> getDeclaringType() {
return getDeclaringBean().getBeanType();
}
@Override
public String getMethodName() {
return getName();
}
}
/**
* Bean property compile-time data container.
*
* @param <P> The property type.
*/
@Internal
@UsedByGeneratedCode
public static final | BeanMethodImpl |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/usertype/MyEntity.java | {
"start": 262,
"end": 436
} | class ____ {
@Id
@Type(MyType.class)
MyId id;
String content;
MyEntity(MyId id, String content) {
this.id = id;
this.content = content;
}
MyEntity() {
}
}
| MyEntity |
java | apache__avro | lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TetherTask.java | {
"start": 1803,
"end": 2407
} | class ____<IN, MID, OUT> {
static final Logger LOG = LoggerFactory.getLogger(TetherTask.class);
private Transceiver clientTransceiver;
private OutputProtocol outputClient;
private TaskType taskType;
private int partitions;
private DecoderFactory decoderFactory = DecoderFactory.get();
private BinaryDecoder decoder;
private SpecificDatumReader<IN> inReader;
private SpecificDatumReader<MID> midReader;
private IN inRecord;
private MID midRecord;
private MID midRecordSpare;
private Collector<MID> midCollector;
private Collector<OUT> outCollector;
private static | TetherTask |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/engine/Engine.java | {
"start": 74388,
"end": 75924
} | class ____ extends IndexSearcher implements Releasable {
private final String source;
private final Closeable onClose;
public Searcher(
String source,
IndexReader reader,
Similarity similarity,
QueryCache queryCache,
QueryCachingPolicy queryCachingPolicy,
Closeable onClose
) {
super(reader);
setSimilarity(similarity);
setQueryCache(queryCache);
setQueryCachingPolicy(queryCachingPolicy);
this.source = source;
this.onClose = onClose;
}
/**
* The source that caused this searcher to be acquired.
*/
public String source() {
return source;
}
public DirectoryReader getDirectoryReader() {
if (getIndexReader() instanceof DirectoryReader) {
return (DirectoryReader) getIndexReader();
}
throw new IllegalStateException("Can't use " + getIndexReader().getClass() + " as a directory reader");
}
@Override
public void close() {
try {
onClose.close();
} catch (IOException e) {
throw new UncheckedIOException("failed to close", e);
} catch (AlreadyClosedException e) {
// This means there's a bug somewhere: don't suppress it
throw new AssertionError(e);
}
}
}
public abstract static | Searcher |
java | apache__camel | components/camel-smb/src/test/java/org/apache/camel/component/smb/SmbChangedReadLockIT.java | {
"start": 1833,
"end": 4655
} | class ____ extends SmbServerTestSupport {
@TempDir
Path testDirectory;
protected String getSmbUrl() {
return String.format(
"smb:%s/%s/changed?username=%s&password=%s&searchPattern=*&readLock=changed&readLockCheckInterval=1000&delete=true",
service.address(), service.shareName(), service.userName(), service.password());
}
@Test
public void testChangedReadLock() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
mock.expectedFileExists(testDirectory.resolve("out/slowfile.dat"));
writeSlowFile();
MockEndpoint.assertIsSatisfied(context);
String content = context.getTypeConverter().convertTo(String.class, testDirectory.resolve("out/slowfile.dat").toFile());
String[] lines = content.split(System.lineSeparator());
assertEquals(20, lines.length, "There should be 20 lines in the file");
for (int i = 0; i < 20; i++) {
assertEquals("Line " + i, lines[i]);
}
}
private void writeSlowFile() throws Exception {
SMBClient smbClient = new SMBClient();
int port = Integer.parseInt(service.address().split(":")[1]);
try (Connection connection = smbClient.connect("localhost", port)) {
AuthenticationContext ac = new AuthenticationContext(service.userName(), service.password().toCharArray(), null);
Session session = connection.authenticate(ac);
// Connect to Share
try (DiskShare share = (DiskShare) session.connectShare(service.shareName())) {
if (!share.folderExists("/changed")) {
new SmbFiles().mkdirs(share, "/changed");
}
try (File f = share.openFile("changed/slowfile.dat", EnumSet.of(AccessMask.FILE_WRITE_DATA),
EnumSet.of(FileAttributes.FILE_ATTRIBUTE_NORMAL), SMB2ShareAccess.ALL,
SMB2CreateDisposition.FILE_OPEN_IF, EnumSet.of(SMB2CreateOptions.FILE_DIRECTORY_FILE))) {
int offset = 0;
for (int i = 0; i < 20; i++) {
byte[] b = ("Line " + i + System.lineSeparator()).getBytes(StandardCharsets.UTF_8);
f.write(new ArrayByteChunkProvider(b, offset));
offset += b.length;
Thread.sleep(200L);
}
}
}
}
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from(getSmbUrl()).to(TestSupport.fileUri(testDirectory, "out"), "mock:result");
}
};
}
}
| SmbChangedReadLockIT |
java | alibaba__nacos | core/src/main/java/com/alibaba/nacos/core/distributed/distro/task/delay/DistroDelayTaskProcessor.java | {
"start": 1252,
"end": 2683
} | class ____ implements NacosTaskProcessor {
private final DistroTaskEngineHolder distroTaskEngineHolder;
private final DistroComponentHolder distroComponentHolder;
public DistroDelayTaskProcessor(DistroTaskEngineHolder distroTaskEngineHolder,
DistroComponentHolder distroComponentHolder) {
this.distroTaskEngineHolder = distroTaskEngineHolder;
this.distroComponentHolder = distroComponentHolder;
}
@Override
public boolean process(NacosTask task) {
if (!(task instanceof DistroDelayTask)) {
return true;
}
DistroDelayTask distroDelayTask = (DistroDelayTask) task;
DistroKey distroKey = distroDelayTask.getDistroKey();
switch (distroDelayTask.getAction()) {
case DELETE:
DistroSyncDeleteTask syncDeleteTask = new DistroSyncDeleteTask(distroKey, distroComponentHolder);
distroTaskEngineHolder.getExecuteWorkersManager().addTask(distroKey, syncDeleteTask);
return true;
case CHANGE:
case ADD:
DistroSyncChangeTask syncChangeTask = new DistroSyncChangeTask(distroKey, distroComponentHolder);
distroTaskEngineHolder.getExecuteWorkersManager().addTask(distroKey, syncChangeTask);
return true;
default:
return false;
}
}
}
| DistroDelayTaskProcessor |
java | spring-projects__spring-framework | spring-websocket/src/test/java/org/springframework/web/socket/config/HandlersBeanDefinitionParserTests.java | {
"start": 14644,
"end": 14953
} | class ____ implements SockJsMessageCodec {
@Override
public String encode(String... messages) {
return null;
}
@Override
public String[] decode(String content) {
return new String[0];
}
@Override
public String[] decodeInputStream(InputStream content) {
return new String[0];
}
}
| TestMessageCodec |
java | quarkusio__quarkus | integration-tests/injectmock/src/test/java/io/quarkus/it/mockbean/RequestScopedFooMockTest.java | {
"start": 322,
"end": 800
} | class ____ {
@InjectMock
RequestScopedFoo foo;
@InjectMock
RequestScopedFooFromProducer foo2;
@Test
void testMock() {
when(foo.ping()).thenReturn("pong");
when(foo2.ping()).thenReturn("pong2");
assertEquals("pong", foo.ping());
assertEquals("pong2", foo2.ping());
assertFalse(RequestScopedFoo.CONSTRUCTED.get());
assertFalse(RequestScopedFooFromProducer.CONSTRUCTED.get());
}
}
| RequestScopedFooMockTest |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestConfigurationHelper.java | {
"start": 1927,
"end": 1964
} | enum ____ no values.
*/
private | with |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/type/descriptor/java/OffsetTimeJavaTypeDescriptorTest.java | {
"start": 1064,
"end": 2620
} | class ____ {
@Test
@JiraKey("HHH-17229")
public void testWrap() {
final OffsetTimeJavaType javaType = OffsetTimeJavaType.INSTANCE;
final WrapperOptions wrapperOptions = new WrapperOptions() {
@Override
public SharedSessionContractImplementor getSession() {
return null;
}
public boolean useStreamForLobBinding() {
return false;
}
@Override
public int getPreferredSqlTypeCodeForBoolean() {
return 0;
}
@Override
public boolean useLanguageTagForLocale() {
return true;
}
public LobCreator getLobCreator() {
return NonContextualLobCreator.INSTANCE;
}
public JdbcType remapSqlTypeDescriptor(JdbcType sqlTypeDescriptor) {
return sqlTypeDescriptor;
}
@Override
public TimeZone getJdbcTimeZone() {
return null;
}
@Override
public Dialect getDialect() {
return null;
}
@Override
public TypeConfiguration getTypeConfiguration() {
return null;
}
@Override
public FormatMapper getXmlFormatMapper() {
return null;
}
@Override
public FormatMapper getJsonFormatMapper() {
return null;
}
};
final Time sqlTime = new Time(
LocalDate.EPOCH.atTime( LocalTime.of( 0, 1, 2, 0 ) )
.toInstant( ZoneOffset.ofHours( 4 ) )
.plusMillis( 123 )
.toEpochMilli()
);
final OffsetTime wrappedSqlTime = javaType.wrap( sqlTime, wrapperOptions );
assertThat( wrappedSqlTime ).isEqualTo( LocalTime.of( 20, 1, 2, 123_000_000 ).atOffset( OffsetDateTime.now().getOffset() ) );
}
}
| OffsetTimeJavaTypeDescriptorTest |
java | spring-projects__spring-boot | module/spring-boot-devtools/src/intTest/java/org/springframework/boot/devtools/tests/ApplicationState.java | {
"start": 868,
"end": 1937
} | class ____ {
private final Instant launchTime;
private final Integer serverPort;
private final FileContents out;
private final FileContents err;
ApplicationState(File serverPortFile, LaunchedJvm jvm) {
this(serverPortFile, jvm.getStandardOut(), jvm.getStandardError(), jvm.getLaunchTime());
}
ApplicationState(File serverPortFile, LaunchedApplication application) {
this(serverPortFile, application.getStandardOut(), application.getStandardError(), application.getLaunchTime());
}
private ApplicationState(File serverPortFile, File out, File err, Instant launchTime) {
this.serverPort = new FileContents(serverPortFile).get(Integer::parseInt);
this.out = new FileContents(out);
this.err = new FileContents(err);
this.launchTime = launchTime;
}
boolean hasServerPort() {
return this.serverPort != null;
}
int getServerPort() {
return this.serverPort;
}
@Override
public String toString() {
return String.format("Application launched at %s produced output:%n%s%n%s", this.launchTime, this.out,
this.err);
}
}
| ApplicationState |
java | apache__camel | components/camel-http/src/main/java/org/apache/camel/component/http/cloud/HttpServiceExpression.java | {
"start": 965,
"end": 1042
} | class ____ added to allow further customizations.
*/
@Deprecated
public final | is |
java | elastic__elasticsearch | x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/optimizer/EqlFoldSpec.java | {
"start": 378,
"end": 2109
} | class ____ {
private final String name;
private final String description;
private final String expression;
private final Object expected;
EqlFoldSpec(String name, String description, String expression, Object expected) {
this.name = name;
this.description = description;
this.expression = expression;
this.expected = expected;
}
public String expression() {
return expression;
}
public Object expected() {
return expected;
}
public String toString() {
StringBuilder sb = new StringBuilder();
appendWithComma(sb, "name", name);
appendWithComma(sb, "expression", expression);
appendWithComma(sb, "expected", expected == null ? "null" : expected);
return sb.toString();
}
private static void appendWithComma(StringBuilder builder, String key, Object value) {
if (value != null) {
String valueStr = value.toString();
if (Strings.isEmpty(valueStr) == false) {
if (builder.length() > 0) {
builder.append(", ");
}
builder.append(key);
builder.append(": ");
builder.append(valueStr);
}
}
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
EqlFoldSpec that = (EqlFoldSpec) other;
return Objects.equals(this.expression, that.expression);
}
@Override
public int hashCode() {
return Objects.hash(this.expression);
}
}
| EqlFoldSpec |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/cfg/ConfigFeature.java | {
"start": 277,
"end": 1105
} | interface ____
{
/**
* Accessor for checking whether this feature is enabled by default.
*/
public boolean enabledByDefault();
/**
* Returns bit mask for this feature instance
*/
public int getMask();
/**
* Convenience method for checking whether feature is enabled in given bitmask
*/
public boolean enabledIn(int flags);
/**
* Method that calculates bit set (flags) of all features that
* are enabled by default.
*/
public static <F extends Enum<F> & ConfigFeature> int collectFeatureDefaults(Class<F> enumClass)
{
int flags = 0;
for (F value : enumClass.getEnumConstants()) {
if (value.enabledByDefault()) {
flags |= value.getMask();
}
}
return flags;
}
}
| ConfigFeature |
java | apache__dubbo | dubbo-common/src/main/java/org/apache/dubbo/config/RegistryConfig.java | {
"start": 1688,
"end": 14279
} | class ____ extends AbstractConfig {
private static final long serialVersionUID = 5508512956753757169L;
public static final String NO_AVAILABLE = "N/A";
/**
* Register center address.
*/
private String address;
/**
* Username to login the register center.
*/
private String username;
/**
* Password to login the register center.
*/
private String password;
/**
* Default port for the register center.
*/
private Integer port;
/**
* Protocol used for the register center.
*/
private String protocol;
/**
* Network transmission type.
*/
private String transporter;
/**
* Server implementation.
*/
private String server;
/**
* Client implementation.
*/
private String client;
/**
* Affects how traffic distributes among registries, useful when subscribing to multiple registries.
* Available options:
* - "zone-aware": A certain type of traffic always goes to one Registry according to where the traffic is originated.
*/
private String cluster;
/**
* The region where the registry belongs, usually used to isolate traffics.
*/
private String zone;
/**
* The group that services registry belongs to.
*/
private String group;
/**
* Version of the registry.
*/
private String version;
/**
* Connect timeout in milliseconds for the register center.
*/
private Integer timeout;
/**
* Session timeout in milliseconds for the register center.
*/
private Integer session;
/**
* File for saving the register center dynamic list.
*/
private String file;
/**
* Wait time before stopping.
*/
private Integer wait;
/**
* Whether to check if the register center is available when booting up.
*/
private Boolean check;
/**
* Whether to allow dynamic service registration on the register center.
*/
private Boolean dynamic;
/**
* Whether to allow exporting service on the register center.
*/
private Boolean register;
/**
* Whether to allow subscribing to services on the register center.
*/
private Boolean subscribe;
/**
* Customized parameters.
*/
private Map<String, String> parameters;
/**
* Simplify the registry, useful for both providers and consumers.
*
* @since 2.7.0
*/
private Boolean simplified;
/**
* After simplifying the registry, add some parameters individually, useful for providers.
* Example: extra-keys = "A, b, c, d".
*
* @since 2.7.0
*/
private String extraKeys;
/**
* Indicates whether the address works as a configuration center or not.
*/
private Boolean useAsConfigCenter;
/**
* Indicates whether the address works as a remote metadata center or not.
*/
private Boolean useAsMetadataCenter;
/**
* List of RPC protocols accepted by this registry, e.g., "dubbo,rest".
*/
private String accepts;
/**
* Always use this registry first if set to true, useful when subscribing to multiple registries.
*/
private Boolean preferred;
/**
* Affects traffic distribution among registries, useful when subscribing to multiple registries.
* Takes effect only when no preferred registry is specified.
*/
private Integer weight;
/**
* Register mode.
*/
private String registerMode;
/**
* Enable empty protection.
*/
private Boolean enableEmptyProtection;
/**
* Security settings.
*/
private String secure;
public String getSecure() {
return secure;
}
public void setSecure(String secure) {
this.secure = secure;
}
public RegistryConfig() {}
public RegistryConfig(ApplicationModel applicationModel) {
super(applicationModel);
}
public RegistryConfig(String address) {
setAddress(address);
}
public RegistryConfig(ApplicationModel applicationModel, String address) {
super(applicationModel);
setAddress(address);
}
public RegistryConfig(String address, String protocol) {
setAddress(address);
setProtocol(protocol);
}
public RegistryConfig(ApplicationModel applicationModel, String address, String protocol) {
super(applicationModel);
setAddress(address);
setProtocol(protocol);
}
@Override
public String getId() {
return super.getId();
}
public String getProtocol() {
return protocol;
}
public void setProtocol(String protocol) {
this.protocol = protocol;
}
@Parameter(excluded = true)
public String getAddress() {
return address;
}
public void setAddress(String address) {
this.address = address;
if (address != null) {
try {
URL url = URL.valueOf(address);
// Refactor since 2.7.8
updatePropertyIfAbsent(this::getUsername, this::setUsername, url.getUsername());
updatePropertyIfAbsent(this::getPassword, this::setPassword, url.getPassword());
updatePropertyIfAbsent(this::getProtocol, this::setProtocol, url.getProtocol());
updatePropertyIfAbsent(this::getPort, this::setPort, url.getPort());
Map<String, String> params = url.getParameters();
if (CollectionUtils.isNotEmptyMap(params)) {
params.remove(BACKUP_KEY);
}
updateParameters(params);
} catch (Exception ignored) {
}
}
}
public Integer getPort() {
return port;
}
public void setPort(Integer port) {
this.port = port;
}
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
/**
* @return wait
* @see org.apache.dubbo.config.ProviderConfig#getWait()
* @deprecated
*/
@Deprecated
public Integer getWait() {
return wait;
}
/**
* @param wait
* @see org.apache.dubbo.config.ProviderConfig#setWait(Integer)
* @deprecated
*/
@Deprecated
public void setWait(Integer wait) {
this.wait = wait;
if (wait != null && wait > 0) {
System.setProperty(SHUTDOWN_WAIT_KEY, String.valueOf(wait));
}
}
public Boolean isCheck() {
return check;
}
public void setCheck(Boolean check) {
this.check = check;
}
public String getFile() {
return file;
}
public void setFile(String file) {
this.file = file;
}
/**
* @return transport
* @see #getTransporter()
* @deprecated
*/
@Deprecated
@Parameter(excluded = true, attribute = false)
public String getTransport() {
return getTransporter();
}
/**
* @param transport
* @see #setTransporter(String)
* @deprecated
*/
@Deprecated
public void setTransport(String transport) {
setTransporter(transport);
}
public String getTransporter() {
return transporter;
}
public void setTransporter(String transporter) {
/*if(transporter != null && transporter.length() > 0 && ! this.getExtensionLoader(Transporter.class).hasExtension(transporter)){
throw new IllegalStateException("No such transporter type : " + transporter);
}*/
this.transporter = transporter;
}
public String getServer() {
return server;
}
public void setServer(String server) {
/*if(server != null && server.length() > 0 && ! this.getExtensionLoader(Transporter.class).hasExtension(server)){
throw new IllegalStateException("No such server type : " + server);
}*/
this.server = server;
}
public String getClient() {
return client;
}
public void setClient(String client) {
/*if(client != null && client.length() > 0 && ! this.getExtensionLoader(Transporter.class).hasExtension(client)){
throw new IllegalStateException("No such client type : " + client);
}*/
this.client = client;
}
public Integer getTimeout() {
return timeout;
}
public void setTimeout(Integer timeout) {
this.timeout = timeout;
}
public Integer getSession() {
return session;
}
public void setSession(Integer session) {
this.session = session;
}
public Boolean isDynamic() {
return dynamic;
}
public void setDynamic(Boolean dynamic) {
this.dynamic = dynamic;
}
public Boolean isRegister() {
return register;
}
public void setRegister(Boolean register) {
this.register = register;
}
public Boolean isSubscribe() {
return subscribe;
}
public void setSubscribe(Boolean subscribe) {
this.subscribe = subscribe;
}
public String getCluster() {
return cluster;
}
public void setCluster(String cluster) {
this.cluster = cluster;
}
public String getZone() {
return zone;
}
public void setZone(String zone) {
this.zone = zone;
}
public String getGroup() {
return group;
}
public void setGroup(String group) {
this.group = group;
}
public String getVersion() {
return version;
}
public void setVersion(String version) {
this.version = version;
}
public Map<String, String> getParameters() {
return parameters;
}
public void setParameters(Map<String, String> parameters) {
this.parameters = parameters;
}
public void updateParameters(Map<String, String> parameters) {
if (CollectionUtils.isEmptyMap(parameters)) {
return;
}
if (this.parameters == null) {
this.parameters = parameters;
} else {
this.parameters.putAll(parameters);
}
}
public Boolean getSimplified() {
return simplified;
}
public void setSimplified(Boolean simplified) {
this.simplified = simplified;
}
@Parameter(key = EXTRA_KEYS_KEY)
public String getExtraKeys() {
return extraKeys;
}
public void setExtraKeys(String extraKeys) {
this.extraKeys = extraKeys;
}
@Parameter(excluded = true)
public Boolean getUseAsConfigCenter() {
return useAsConfigCenter;
}
public void setUseAsConfigCenter(Boolean useAsConfigCenter) {
this.useAsConfigCenter = useAsConfigCenter;
}
@Parameter(excluded = true)
public Boolean getUseAsMetadataCenter() {
return useAsMetadataCenter;
}
public void setUseAsMetadataCenter(Boolean useAsMetadataCenter) {
this.useAsMetadataCenter = useAsMetadataCenter;
}
public String getAccepts() {
return accepts;
}
public void setAccepts(String accepts) {
this.accepts = accepts;
}
public Boolean getPreferred() {
return preferred;
}
public void setPreferred(Boolean preferred) {
this.preferred = preferred;
}
public Integer getWeight() {
return weight;
}
public void setWeight(Integer weight) {
this.weight = weight;
}
@Parameter(key = REGISTER_MODE_KEY)
public String getRegisterMode() {
return registerMode;
}
public void setRegisterMode(String registerMode) {
this.registerMode = registerMode;
}
@Parameter(key = ENABLE_EMPTY_PROTECTION_KEY)
public Boolean getEnableEmptyProtection() {
return enableEmptyProtection;
}
public void setEnableEmptyProtection(Boolean enableEmptyProtection) {
this.enableEmptyProtection = enableEmptyProtection;
}
@Override
@Parameter(excluded = true, attribute = false)
public boolean isValid() {
// empty protocol will default to 'dubbo'
return !StringUtils.isEmpty(address) || !StringUtils.isEmpty(protocol);
}
@Override
@Parameter(excluded = true)
public Boolean isDefault() {
return isDefault;
}
}
| RegistryConfig |
java | netty__netty | codec-http3/src/main/java/io/netty/handler/codec/http3/Http3MaxPushIdFrame.java | {
"start": 790,
"end": 1098
} | interface ____ extends Http3ControlStreamFrame {
@Override
default long type() {
return Http3CodecUtils.HTTP3_MAX_PUSH_ID_FRAME_TYPE;
}
/**
* Returns the maximum value for a Push ID that the server can use.
*
* @return the id.
*/
long id();
}
| Http3MaxPushIdFrame |
java | apache__camel | core/camel-management/src/test/java/org/apache/camel/management/ManagedCustomLoadBalancerTest.java | {
"start": 3235,
"end": 3950
} | class ____ extends LoadBalancerSupport {
@Override
public boolean process(Exchange exchange, AsyncCallback callback) {
String body = exchange.getIn().getBody(String.class);
try {
if ("x".equals(body)) {
getProcessors().get(0).process(exchange);
} else if ("y".equals(body)) {
getProcessors().get(1).process(exchange);
} else {
getProcessors().get(2).process(exchange);
}
} catch (Throwable e) {
exchange.setException(e);
}
callback.done(true);
return true;
}
}
}
| MyLoadBalancer |
java | apache__maven | impl/maven-impl/src/main/java/org/apache/maven/impl/cache/CacheSelector.java | {
"start": 3168,
"end": 3264
} | class ____ check
* @param typeName the type name to match against
* @return true if the | to |
java | quarkusio__quarkus | independent-projects/bootstrap/core/src/main/java/io/quarkus/bootstrap/classloading/MemoryClassPathElement.java | {
"start": 3989,
"end": 5141
} | class ____ extends URLStreamHandler {
private final byte[] bytes;
private long lastModified;
public MemoryUrlStreamHandler(byte[] bytes, long lastModified) {
this.bytes = bytes;
this.lastModified = lastModified;
}
@Override
protected URLConnection openConnection(final URL u) throws IOException {
return new URLConnection(u) {
@Override
public void connect() throws IOException {
}
@Override
public InputStream getInputStream() throws IOException {
return new ByteArrayInputStream(bytes);
}
@Override
public long getLastModified() {
return lastModified;
}
@Override
public int getContentLength() {
return bytes.length;
}
@Override
public long getContentLengthLong() {
return bytes.length;
}
};
}
}
}
| MemoryUrlStreamHandler |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/CoalesceFunctionITCase.java | {
"start": 1272,
"end": 2668
} | class ____ extends BuiltInFunctionTestBase {
@Override
Stream<TestSetSpec> getTestSetSpecs() {
return Stream.of(
TestSetSpec.forFunction(BuiltInFunctionDefinitions.COALESCE)
.onFieldsWithData(null, null, 1)
.andDataTypes(BIGINT().nullable(), INT().nullable(), INT().notNull())
.testResult(
coalesce($("f0"), $("f1")),
"COALESCE(f0, f1)",
null,
BIGINT().nullable())
.testResult(
coalesce($("f0"), $("f2")),
"COALESCE(f0, f2)",
1L,
BIGINT().notNull())
.testResult(
coalesce($("f1"), $("f2")), "COALESCE(f1, f2)", 1, INT().notNull())
.testResult(
coalesce($("f0"), 1),
"COALESCE(f0, 1)",
1L,
// In this case, the return type is not null because we have a
// constant in the function invocation
BIGINT().notNull()));
}
}
| CoalesceFunctionITCase |
java | apache__avro | lang/java/avro/src/main/java/org/apache/avro/util/internal/Accessor.java | {
"start": 1500,
"end": 1862
} | class ____ {
protected abstract JsonNode defaultValue(Field field);
protected abstract Field createField(String name, Schema schema, String doc, JsonNode defaultValue,
boolean validate, Order order);
protected abstract Field createField(String name, Schema schema, String doc, JsonNode defaultValue);
}
public abstract static | FieldAccessor |
java | alibaba__nacos | console/src/test/java/com/alibaba/nacos/console/paramcheck/ConsoleDefaultHttpParamExtractorTest.java | {
"start": 1191,
"end": 3095
} | class ____ {
@Mock
HttpServletRequest mockRequest;
ConsoleDefaultHttpParamExtractor extractor;
@BeforeEach
void setUp() {
extractor = new ConsoleDefaultHttpParamExtractor();
}
@Test
void extractParamWithNamespaceId() {
when(mockRequest.getParameter("namespaceId")).thenReturn("test");
List<ParamInfo> actual = extractor.extractParam(mockRequest);
assertEquals("test", actual.get(0).getNamespaceId());
assertNull(actual.get(0).getNamespaceShowName());
}
@Test
void extractParamWithCustomNamespaceId() {
when(mockRequest.getParameter("namespaceId")).thenReturn(null);
when(mockRequest.getParameter("customNamespaceId")).thenReturn("test1");
List<ParamInfo> actual = extractor.extractParam(mockRequest);
assertEquals("test1", actual.get(0).getNamespaceId());
assertNull(actual.get(0).getNamespaceShowName());
}
@Test
void extractParamWithNamespaceName() {
when(mockRequest.getParameter("namespaceId")).thenReturn(null);
when(mockRequest.getParameter("customNamespaceId")).thenReturn(null);
when(mockRequest.getParameter("namespaceName")).thenReturn("testName");
List<ParamInfo> actual = extractor.extractParam(mockRequest);
assertEquals("testName", actual.get(0).getNamespaceShowName());
assertNull(actual.get(0).getNamespaceId());
}
@Test
void extractParamWithFullNamespace() {
when(mockRequest.getParameter("namespaceId")).thenReturn("test");
when(mockRequest.getParameter("namespaceName")).thenReturn("testName");
List<ParamInfo> actual = extractor.extractParam(mockRequest);
assertEquals("test", actual.get(0).getNamespaceId());
assertEquals("testName", actual.get(0).getNamespaceShowName());
}
} | ConsoleDefaultHttpParamExtractorTest |
java | alibaba__nacos | plugin/config/src/main/java/com/alibaba/nacos/plugin/config/constants/ConfigChangeExecuteTypes.java | {
"start": 736,
"end": 1066
} | enum ____ {
/**
* Execute before pointcut.
*/
EXECUTE_BEFORE_TYPE,
/**
* Execute after pointcut.
*/
EXECUTE_AFTER_TYPE;
public boolean equals(ConfigChangeExecuteTypes configChangeExecuteTypes) {
return this.compareTo(configChangeExecuteTypes) == 0;
}
}
| ConfigChangeExecuteTypes |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/usage/UsageServiceTests.java | {
"start": 1180,
"end": 7863
} | class ____ extends ESTestCase {
/**
* Test that we can not add a null reference to a {@link org.elasticsearch.rest.RestHandler} to the {@link UsageService}.
*/
public void testHandlerCanNotBeNull() {
final UsageService service = new UsageService();
expectThrows(NullPointerException.class, () -> service.addRestHandler(null));
}
/**
* Test that we can not add an instance of a {@link org.elasticsearch.rest.RestHandler} with no name to the {@link UsageService}.
*/
public void testAHandlerWithNoName() {
final UsageService service = new UsageService();
final BaseRestHandler horse = new MockRestHandler(null);
final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> service.addRestHandler(horse));
assertThat(
e.getMessage(),
equalTo("handler of type [org.elasticsearch.usage.UsageServiceTests$MockRestHandler] does not have a name")
);
}
/**
* Test that we can add the same instance of a {@link org.elasticsearch.rest.RestHandler} to the {@link UsageService} multiple times.
*/
public void testHandlerWithConflictingNamesButSameInstance() {
final UsageService service = new UsageService();
final String name = randomAlphaOfLength(8);
final BaseRestHandler first = new MockRestHandler(name);
service.addRestHandler(first);
// nothing bad ever happens to me
service.addRestHandler(first);
}
/**
* Test that we can not add different instances of {@link org.elasticsearch.rest.RestHandler} with the same name to the
* {@link UsageService}.
*/
public void testHandlersWithConflictingNamesButDifferentInstances() {
final UsageService service = new UsageService();
final String name = randomAlphaOfLength(8);
final BaseRestHandler first = new MockRestHandler(name);
final BaseRestHandler second = new MockRestHandler(name);
service.addRestHandler(first);
final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> service.addRestHandler(second));
final String expected = String.format(
Locale.ROOT,
"handler of type [%s] conflicts with handler of type [%1$s] as they both have the same name [%s]",
"org.elasticsearch.usage.UsageServiceTests$MockRestHandler",
name
);
assertThat(e.getMessage(), equalTo(expected));
}
public void testRestUsage() throws Exception {
RestRequest restRequest = new FakeRestRequest();
BaseRestHandler handlerA = new MockRestHandler("a");
BaseRestHandler handlerB = new MockRestHandler("b");
BaseRestHandler handlerC = new MockRestHandler("c");
BaseRestHandler handlerD = new MockRestHandler("d");
BaseRestHandler handlerE = new MockRestHandler("e");
BaseRestHandler handlerF = new MockRestHandler("f");
UsageService usageService = new UsageService();
usageService.addRestHandler(handlerA);
usageService.addRestHandler(handlerB);
usageService.addRestHandler(handlerC);
usageService.addRestHandler(handlerD);
usageService.addRestHandler(handlerE);
usageService.addRestHandler(handlerF);
try (var threadPool = createThreadPool()) {
final var client = new NoOpNodeClient(threadPool);
handlerA.handleRequest(restRequest, null, client);
handlerB.handleRequest(restRequest, null, client);
handlerA.handleRequest(restRequest, null, client);
handlerA.handleRequest(restRequest, null, client);
handlerB.handleRequest(restRequest, null, client);
handlerC.handleRequest(restRequest, null, client);
handlerC.handleRequest(restRequest, null, client);
handlerD.handleRequest(restRequest, null, client);
handlerA.handleRequest(restRequest, null, client);
handlerB.handleRequest(restRequest, null, client);
handlerE.handleRequest(restRequest, null, client);
handlerF.handleRequest(restRequest, null, client);
handlerC.handleRequest(restRequest, null, client);
handlerD.handleRequest(restRequest, null, client);
}
Map<String, Long> restUsage = usageService.getRestUsageStats();
assertThat(restUsage, notNullValue());
assertThat(restUsage.size(), equalTo(6));
assertThat(restUsage.get("a"), equalTo(4L));
assertThat(restUsage.get("b"), equalTo(3L));
assertThat(restUsage.get("c"), equalTo(3L));
assertThat(restUsage.get("d"), equalTo(2L));
assertThat(restUsage.get("e"), equalTo(1L));
assertThat(restUsage.get("f"), equalTo(1L));
}
@SuppressWarnings("unchecked")
public void testAggsUsage() throws Exception {
AggregationUsageService.Builder builder = new AggregationUsageService.Builder();
builder.registerAggregationUsage("a", "x");
builder.registerAggregationUsage("a", "y");
builder.registerAggregationUsage("b", "x");
builder.registerAggregationUsage("c");
builder.registerAggregationUsage("b", "y");
builder.registerAggregationUsage("a", "z");
AggregationUsageService usageService = builder.build();
usageService.incAggregationUsage("a", "x");
for (int i = 0; i < 2; i++) {
usageService.incAggregationUsage("a", "y");
}
for (int i = 0; i < 3; i++) {
usageService.incAggregationUsage("a", "z");
}
for (int i = 0; i < 4; i++) {
usageService.incAggregationUsage("b", "x");
}
for (int i = 0; i < 5; i++) {
usageService.incAggregationUsage("b", "y");
}
for (int i = 0; i < 6; i++) {
usageService.incAggregationUsage("c", OTHER_SUBTYPE);
}
Map<String, Object> aggsUsage = usageService.getUsageStats();
assertThat(aggsUsage, notNullValue());
assertThat(aggsUsage.size(), equalTo(3));
assertThat(((Map<String, Object>) aggsUsage.get("a")).get("x"), equalTo(1L));
assertThat(((Map<String, Object>) aggsUsage.get("a")).get("y"), equalTo(2L));
assertThat(((Map<String, Object>) aggsUsage.get("a")).get("z"), equalTo(3L));
assertThat(((Map<String, Object>) aggsUsage.get("b")).get("x"), equalTo(4L));
assertThat(((Map<String, Object>) aggsUsage.get("b")).get("y"), equalTo(5L));
assertThat(((Map<String, Object>) aggsUsage.get("c")).get(OTHER_SUBTYPE), equalTo(6L));
}
private | UsageServiceTests |
java | apache__dubbo | dubbo-remoting/dubbo-remoting-api/src/main/java/org/apache/dubbo/remoting/utils/UrlUtils.java | {
"start": 1550,
"end": 6211
} | class ____ {
private static final String ALLOWED_SERIALIZATION_KEY = "allowedSerialization";
public static int getCloseTimeout(URL url) {
String configuredCloseTimeout = SystemPropertyConfigUtils.getSystemProperty(
CommonConstants.DubboProperty.DUBBO_CLOSE_TIMEOUT_CONFIG_KEY);
int defaultCloseTimeout = -1;
if (StringUtils.isNotEmpty(configuredCloseTimeout)) {
try {
defaultCloseTimeout = Integer.parseInt(configuredCloseTimeout);
} catch (NumberFormatException e) {
// use default heartbeat
}
}
if (defaultCloseTimeout < 0) {
defaultCloseTimeout = getIdleTimeout(url);
}
int closeTimeout = url.getParameter(Constants.CLOSE_TIMEOUT_KEY, defaultCloseTimeout);
int heartbeat = getHeartbeat(url);
if (closeTimeout < heartbeat * 2) {
throw new IllegalStateException("closeTimeout < heartbeatInterval * 2");
}
return closeTimeout;
}
public static int getIdleTimeout(URL url) {
int heartBeat = getHeartbeat(url);
// idleTimeout should be at least more than twice heartBeat because possible retries of client.
int idleTimeout = url.getParameter(Constants.HEARTBEAT_TIMEOUT_KEY, heartBeat * 3);
if (idleTimeout < heartBeat * 2) {
throw new IllegalStateException("idleTimeout < heartbeatInterval * 2");
}
return idleTimeout;
}
public static int getHeartbeat(URL url) {
String configuredHeartbeat =
SystemPropertyConfigUtils.getSystemProperty(CommonConstants.DubboProperty.DUBBO_HEARTBEAT_CONFIG_KEY);
int defaultHeartbeat = Constants.DEFAULT_HEARTBEAT;
if (StringUtils.isNotEmpty(configuredHeartbeat)) {
try {
defaultHeartbeat = Integer.parseInt(configuredHeartbeat);
} catch (NumberFormatException e) {
// use default heartbeat
}
}
return url.getParameter(Constants.HEARTBEAT_KEY, defaultHeartbeat);
}
/**
* Get the serialization id
*
* @param url url
* @return {@link Byte}
*/
public static Byte serializationId(URL url) {
Byte serializationId;
// Obtain the value from prefer_serialization. Such as.fastjson2,hessian2
List<String> preferSerials = preferSerialization(url);
for (String preferSerial : preferSerials) {
if ((serializationId = CodecSupport.getIDByName(preferSerial)) != null) {
return serializationId;
}
}
// Secondly, obtain the value from serialization
if ((serializationId = CodecSupport.getIDByName(url.getParameter(SERIALIZATION_KEY))) != null) {
return serializationId;
}
// Finally, use the default serialization type
return CodecSupport.getIDByName(DefaultSerializationSelector.getDefaultRemotingSerialization());
}
/**
* Get the serialization or default serialization
*
* @param url url
* @return {@link String}
*/
public static String serializationOrDefault(URL url) {
// noinspection OptionalGetWithoutIsPresent
Optional<String> serializations = allSerializations(url).stream().findFirst();
return serializations.orElseGet(DefaultSerializationSelector::getDefaultRemotingSerialization);
}
/**
* Get the all serializations,ensure insertion order
*
* @param url url
* @return {@link List}<{@link String}>
*/
@SuppressWarnings("unchecked")
public static Collection<String> allSerializations(URL url) {
// preferSerialization -> serialization -> default serialization
Set<String> serializations = new LinkedHashSet<>(preferSerialization(url));
Optional.ofNullable(url.getParameter(SERIALIZATION_KEY))
.filter(StringUtils::isNotBlank)
.ifPresent(serializations::add);
serializations.add(DefaultSerializationSelector.getDefaultRemotingSerialization());
return Collections.unmodifiableSet(serializations);
}
/**
* Prefer Serialization
*
* @param url url
* @return {@link List}<{@link String}>
*/
public static List<String> preferSerialization(URL url) {
String preferSerialization = url.getParameter(PREFER_SERIALIZATION_KEY);
if (StringUtils.isNotBlank(preferSerialization)) {
return Collections.unmodifiableList(StringUtils.splitToList(preferSerialization, ','));
}
return Collections.emptyList();
}
}
| UrlUtils |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ClassCanBeStaticTest.java | {
"start": 13022,
"end": 13080
} | class ____ {
static void f() {
| A |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/JUnitAmbiguousTestClassTest.java | {
"start": 1660,
"end": 1997
} | class ____ {
@Test
public void testCase() {}
}
""")
.doTest();
}
@Test
public void negativeNoAnnotations() {
compilationHelper
.addSourceLines(
"Positive.java",
"""
import junit.framework.TestCase;
public | Positive |
java | apache__kafka | server/src/test/java/org/apache/kafka/server/share/fetch/ShareFetchTestUtils.java | {
"start": 1706,
"end": 6126
} | class ____ {
/**
* Validate that the rotated list is equal to the original list rotated by the given position.
*
* @param original The original list.
* @param result The rotated list.
* @param rotationAt The position to rotate the elements at.
*/
public static void validateRotatedListEquals(
List<TopicIdPartition> original,
List<TopicIdPartition> result,
int rotationAt
) {
TopicIdPartition[] originalKeysArray = new TopicIdPartition[original.size()];
int i = 0;
for (TopicIdPartition key : original) {
if (i < rotationAt) {
originalKeysArray[original.size() - rotationAt + i] = key;
} else {
originalKeysArray[i - rotationAt] = key;
}
i++;
}
assertArrayEquals(originalKeysArray, result.toArray());
}
/**
* Create a file records with the given offset values, the number of records from each given start
* offset.
*
* @param recordsPerOffset The offset values and the number of records to create from given offset.
* @return The file records.
* @throws IOException If the file records cannot be created.
*/
public static FileRecords createFileRecords(Map<Long, Integer> recordsPerOffset) throws IOException {
FileRecords fileRecords = FileRecords.open(tempFile());
for (Entry<Long, Integer> entry : recordsPerOffset.entrySet()) {
try (MemoryRecordsBuilder records = memoryRecordsBuilder(entry.getKey(), entry.getValue())) {
fileRecords.append(records.build());
}
}
return fileRecords;
}
/**
* Create a memory records builder with the given number of records and start offset.
*
* @param startOffset The start offset of the records.
* @param numOfRecords The number of records to create.
* @return The memory records builder.
*/
public static MemoryRecordsBuilder memoryRecordsBuilder(long startOffset, int numOfRecords) {
return memoryRecordsBuilder(ByteBuffer.allocate(1024), startOffset, numOfRecords);
}
/**
* Create a memory records builder with the number of records and start offset, in the given buffer.
*
* @param buffer The buffer to write the records to.
* @param startOffset The start offset of the records.
* @param numOfRecords The number of records to create.
* @return The memory records builder.
*/
public static MemoryRecordsBuilder memoryRecordsBuilder(ByteBuffer buffer, long startOffset, int numOfRecords) {
MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, Compression.NONE,
TimestampType.CREATE_TIME, startOffset, 2);
for (int i = 0; i < numOfRecords; i++) {
builder.appendWithOffset(startOffset + i, 0L, TestUtils.randomString(10).getBytes(), TestUtils.randomString(10).getBytes());
}
return builder;
}
/**
* Create a share acquired records from the given acquired records.
*
* @param acquiredRecords The acquired records to create the share acquired records from.
* @return The share acquired records.
*/
public static ShareAcquiredRecords createShareAcquiredRecords(AcquiredRecords acquiredRecords) {
return new ShareAcquiredRecords(
List.of(acquiredRecords), (int) (acquiredRecords.lastOffset() - acquiredRecords.firstOffset() + 1)
);
}
/**
* Fetch the gauge value from the yammer metrics.
*
* @param name The name of the metric.
* @return The gauge value as a number.
*/
public static Number yammerMetricValue(String name) {
try {
Gauge gauge = (Gauge) KafkaYammerMetrics.defaultRegistry().allMetrics().entrySet().stream()
.filter(e -> e.getKey().getMBeanName().contains(name))
.findFirst()
.orElseThrow()
.getValue();
return (Number) gauge.value();
} catch (Exception e) {
return 0;
}
}
/**
* Clear all the yammer metrics.
*/
public static void clearYammerMetrics() {
KafkaYammerMetrics.defaultRegistry().allMetrics().keySet().forEach(
metricName -> KafkaYammerMetrics.defaultRegistry().removeMetric(metricName)
);
}
}
| ShareFetchTestUtils |
java | grpc__grpc-java | interop-testing/src/test/java/io/grpc/testing/integration/OpenTelemetryContextPropagationTest.java | {
"start": 2013,
"end": 7425
} | class ____ extends AbstractInteropTest {
private final OpenTelemetrySdk openTelemetrySdk;
private final Tracer tracer;
private final GrpcOpenTelemetry grpcOpenTelemetry;
private final AtomicReference<Span> applicationSpan = new AtomicReference<>();
private final boolean censusClient;
@Parameterized.Parameters(name = "ContextPropagator={0}, CensusClient={1}")
public static Iterable<Object[]> data() {
return Arrays.asList(new Object[][] {
{W3CTraceContextPropagator.getInstance(), false},
{GrpcTraceBinContextPropagator.defaultInstance(), false},
{GrpcTraceBinContextPropagator.defaultInstance(), true}
});
}
public OpenTelemetryContextPropagationTest(TextMapPropagator textMapPropagator,
boolean isCensusClient) {
this.openTelemetrySdk = OpenTelemetrySdk.builder()
.setTracerProvider(SdkTracerProvider.builder().build())
.setPropagators(ContextPropagators.create(TextMapPropagator.composite(
textMapPropagator
)))
.build();
this.tracer = openTelemetrySdk
.getTracer("grpc-java-interop-test");
GrpcOpenTelemetry.Builder grpcOpentelemetryBuilder = GrpcOpenTelemetry.newBuilder()
.sdk(openTelemetrySdk);
InternalGrpcOpenTelemetry.enableTracing(grpcOpentelemetryBuilder, true);
grpcOpenTelemetry = grpcOpentelemetryBuilder.build();
this.censusClient = isCensusClient;
}
@Override
protected ServerBuilder<?> getServerBuilder() {
NettyServerBuilder builder = NettyServerBuilder.forPort(0, InsecureServerCredentials.create())
.maxInboundMessageSize(AbstractInteropTest.MAX_MESSAGE_SIZE);
builder.intercept(new ServerInterceptor() {
@Override
public <ReqT, RespT> ServerCall.Listener<ReqT> interceptCall(ServerCall<ReqT, RespT> call,
Metadata headers, ServerCallHandler<ReqT, RespT> next) {
ServerCall.Listener<ReqT> listener = next.startCall(call, headers);
return new ForwardingServerCallListener<ReqT>() {
@Override
protected ServerCall.Listener<ReqT> delegate() {
return listener;
}
@Override
public void onMessage(ReqT request) {
applicationSpan.set(tracer.spanBuilder("InteropTest.Application.Span").startSpan());
delegate().onMessage(request);
}
@Override
public void onHalfClose() {
maybeCloseSpan(applicationSpan);
delegate().onHalfClose();
}
@Override
public void onCancel() {
maybeCloseSpan(applicationSpan);
delegate().onCancel();
}
@Override
public void onComplete() {
maybeCloseSpan(applicationSpan);
delegate().onComplete();
}
};
}
});
// To ensure proper propagation of remote spans from gRPC to your application, this interceptor
// must be after any application interceptors that interact with spans. This allows the tracing
// information to be correctly passed along. However, it's fine for application-level onMessage
// handlers to access the span.
grpcOpenTelemetry.configureServerBuilder(builder);
return builder;
}
private void maybeCloseSpan(AtomicReference<Span> applicationSpan) {
Span tmp = applicationSpan.get();
if (tmp != null) {
tmp.end();
}
}
@Override
protected boolean metricsExpected() {
return false;
}
@Override
protected ManagedChannelBuilder<?> createChannelBuilder() {
NettyChannelBuilder builder = NettyChannelBuilder.forAddress(getListenAddress())
.maxInboundMessageSize(AbstractInteropTest.MAX_MESSAGE_SIZE)
.usePlaintext();
if (!censusClient) {
// Disabling census-tracing is necessary to avoid trace ID mismatches.
// This is because census-tracing overrides the grpc-trace-bin header with
// OpenTelemetry's GrpcTraceBinPropagator.
InternalNettyChannelBuilder.setTracingEnabled(builder, false);
grpcOpenTelemetry.configureChannelBuilder(builder);
}
return builder;
}
@Test
public void otelSpanContextPropagation() {
Assume.assumeFalse(censusClient);
Span parentSpan = tracer.spanBuilder("Test.interopTest").startSpan();
try (Scope scope = Context.current().with(parentSpan).makeCurrent()) {
blockingStub.unaryCall(SimpleRequest.getDefaultInstance());
}
assertEquals(parentSpan.getSpanContext().getTraceId(),
applicationSpan.get().getSpanContext().getTraceId());
}
@Test
@SuppressWarnings("deprecation")
public void censusToOtelGrpcTraceBinPropagator() {
Assume.assumeTrue(censusClient);
io.opencensus.trace.Tracer censusTracer = io.opencensus.trace.Tracing.getTracer();
io.opencensus.trace.Span parentSpan = censusTracer.spanBuilder("Test.interopTest")
.startSpan();
io.grpc.Context context = io.opencensus.trace.unsafe.ContextUtils.withValue(
io.grpc.Context.current(), parentSpan);
io.grpc.Context previous = context.attach();
try {
blockingStub.unaryCall(SimpleRequest.getDefaultInstance());
assertEquals(parentSpan.getContext().getTraceId().toLowerBase16(),
applicationSpan.get().getSpanContext().getTraceId());
} finally {
context.detach(previous);
}
}
}
| OpenTelemetryContextPropagationTest |
java | elastic__elasticsearch | x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregatorFactory.java | {
"start": 1392,
"end": 5688
} | class ____ extends MultiValuesSourceAggregatorFactory {
private final TTestType testType;
private final int tails;
private final Query filterA;
private final Query filterB;
private Tuple<Weight, Weight> weights;
TTestAggregatorFactory(
String name,
Map<String, ValuesSourceConfig> configs,
TTestType testType,
int tails,
QueryBuilder filterA,
QueryBuilder filterB,
DocValueFormat format,
AggregationContext context,
AggregatorFactory parent,
AggregatorFactories.Builder subFactoriesBuilder,
Map<String, Object> metadata
) throws IOException {
super(name, configs, format, context, parent, subFactoriesBuilder, metadata);
this.testType = testType;
this.tails = tails;
this.filterA = filterA == null ? null : context.buildQuery(filterA);
this.filterB = filterB == null ? null : context.buildQuery(filterB);
}
@Override
protected Aggregator createUnmapped(Aggregator parent, Map<String, Object> metadata) throws IOException {
return switch (testType) {
case PAIRED -> new PairedTTestAggregator(name, null, tails, format, context, parent, metadata);
case HOMOSCEDASTIC -> new UnpairedTTestAggregator(name, null, tails, true, this::getWeights, format, context, parent, metadata);
case HETEROSCEDASTIC -> new UnpairedTTestAggregator(
name,
null,
tails,
false,
this::getWeights,
format,
context,
parent,
metadata
);
};
}
@Override
protected Aggregator doCreateInternal(
Map<String, ValuesSourceConfig> configs,
DocValueFormat format,
Aggregator parent,
CardinalityUpperBound cardinality,
Map<String, Object> metadata
) throws IOException {
MultiValuesSource.NumericMultiValuesSource numericMultiVS = new MultiValuesSource.NumericMultiValuesSource(configs);
if (numericMultiVS.areValuesSourcesEmpty()) {
return createUnmapped(parent, metadata);
}
switch (testType) {
case PAIRED:
if (filterA != null || filterB != null) {
throw new IllegalArgumentException("Paired t-test doesn't support filters");
}
return new PairedTTestAggregator(name, numericMultiVS, tails, format, context, parent, metadata);
case HOMOSCEDASTIC:
return new UnpairedTTestAggregator(name, numericMultiVS, tails, true, this::getWeights, format, context, parent, metadata);
case HETEROSCEDASTIC:
return new UnpairedTTestAggregator(name, numericMultiVS, tails, false, this::getWeights, format, context, parent, metadata);
default:
throw new IllegalArgumentException("Unsupported t-test type " + testType);
}
}
/**
* Returns the {@link Weight}s for this filters, creating it if
* necessary. This is done lazily so that the {@link Weight} is only created
* if the aggregation collects documents reducing the overhead of the
* aggregation in the case where no documents are collected.
*
* Note that as aggregations are initialsed and executed in a serial manner,
* no concurrency considerations are necessary here.
*/
public Tuple<Weight, Weight> getWeights() {
if (weights == null) {
weights = new Tuple<>(getWeight(filterA), getWeight(filterB));
}
return weights;
}
public Weight getWeight(Query filter) {
if (filter != null) {
IndexSearcher contextSearcher = context.searcher();
try {
return contextSearcher.createWeight(contextSearcher.rewrite(filter), ScoreMode.COMPLETE_NO_SCORES, 1f);
} catch (IOException e) {
throw new AggregationInitializationException("Failed to initialize filter", e);
}
}
return null;
}
@Override
public String getStatsSubtype() {
return configs.get(A_FIELD.getPreferredName()).valueSourceType().typeName();
}
}
| TTestAggregatorFactory |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/jdk8/MaybeFlattenStreamAsFlowableTest.java | {
"start": 1469,
"end": 12157
} | class ____ extends RxJavaTest {
@Test
public void successJust() {
Maybe.just(1)
.flattenStreamAsFlowable(Stream::of)
.test()
.assertResult(1);
}
@Test
public void successEmpty() {
Maybe.just(1)
.flattenStreamAsFlowable(v -> Stream.of())
.test()
.assertResult();
}
@Test
public void successMany() {
Maybe.just(1)
.flattenStreamAsFlowable(v -> Stream.of(2, 3, 4, 5, 6))
.test()
.assertResult(2, 3, 4, 5, 6);
}
@Test
public void successManyTake() {
Maybe.just(1)
.flattenStreamAsFlowable(v -> Stream.of(2, 3, 4, 5, 6))
.take(3)
.test()
.assertResult(2, 3, 4);
}
@Test
public void empty() throws Throwable {
@SuppressWarnings("unchecked")
Function<? super Integer, Stream<? extends Integer>> f = mock(Function.class);
Maybe.<Integer>empty()
.flattenStreamAsFlowable(f)
.test()
.assertResult();
verify(f, never()).apply(any());
}
@Test
public void error() throws Throwable {
@SuppressWarnings("unchecked")
Function<? super Integer, Stream<? extends Integer>> f = mock(Function.class);
Maybe.<Integer>error(new TestException())
.flattenStreamAsFlowable(f)
.test()
.assertFailure(TestException.class);
verify(f, never()).apply(any());
}
@Test
public void mapperCrash() {
Maybe.just(1)
.flattenStreamAsFlowable(v -> { throw new TestException(); })
.test()
.assertFailure(TestException.class);
}
@Test
public void dispose() {
TestHelper.checkDisposed(Maybe.never().flattenStreamAsFlowable(Stream::of));
}
@Test
public void doubleOnSubscribe() {
TestHelper.checkDoubleOnSubscribeMaybeToFlowable(m -> m.flattenStreamAsFlowable(Stream::of));
}
@Test
public void badRequest() {
TestHelper.assertBadRequestReported(MaybeSubject.create().flattenStreamAsFlowable(Stream::of));
}
@Test
public void fusedEmpty() {
TestSubscriberEx<Integer> ts = new TestSubscriberEx<>();
ts.setInitialFusionMode(QueueFuseable.ANY);
Maybe.just(1)
.flattenStreamAsFlowable(v -> Stream.<Integer>of())
.subscribe(ts);
ts.assertFuseable()
.assertFusionMode(QueueFuseable.ASYNC)
.assertResult();
}
@Test
public void fusedJust() {
TestSubscriberEx<Integer> ts = new TestSubscriberEx<>();
ts.setInitialFusionMode(QueueFuseable.ANY);
Maybe.just(1)
.flattenStreamAsFlowable(v -> Stream.<Integer>of(v))
.subscribe(ts);
ts.assertFuseable()
.assertFusionMode(QueueFuseable.ASYNC)
.assertResult(1);
}
@Test
public void fusedMany() {
TestSubscriberEx<Integer> ts = new TestSubscriberEx<>();
ts.setInitialFusionMode(QueueFuseable.ANY);
Maybe.just(1)
.flattenStreamAsFlowable(v -> Stream.<Integer>of(v, v + 1, v + 2))
.subscribe(ts);
ts.assertFuseable()
.assertFusionMode(QueueFuseable.ASYNC)
.assertResult(1, 2, 3);
}
@Test
public void fusedManyRejected() {
TestSubscriberEx<Integer> ts = new TestSubscriberEx<>();
ts.setInitialFusionMode(QueueFuseable.SYNC);
Maybe.just(1)
.flattenStreamAsFlowable(v -> Stream.<Integer>of(v, v + 1, v + 2))
.subscribe(ts);
ts.assertFuseable()
.assertFusionMode(QueueFuseable.NONE)
.assertResult(1, 2, 3);
}
@Test
public void manyBackpressured() {
Maybe.just(1)
.flattenStreamAsFlowable(v -> IntStream.rangeClosed(1, 5).boxed())
.test(0L)
.assertEmpty()
.requestMore(2)
.assertValuesOnly(1, 2)
.requestMore(2)
.assertValuesOnly(1, 2, 3, 4)
.requestMore(1)
.assertResult(1, 2, 3, 4, 5);
}
@Test
public void manyBackpressured2() {
Maybe.just(1)
.flattenStreamAsFlowable(v -> IntStream.rangeClosed(1, 5).boxed())
.rebatchRequests(1)
.test(0L)
.assertEmpty()
.requestMore(2)
.assertValuesOnly(1, 2)
.requestMore(2)
.assertValuesOnly(1, 2, 3, 4)
.requestMore(1)
.assertResult(1, 2, 3, 4, 5);
}
@Test
public void fusedStreamAvailableLater() {
TestSubscriberEx<Integer> ts = new TestSubscriberEx<>();
ts.setInitialFusionMode(QueueFuseable.ANY);
MaybeSubject<Integer> ms = MaybeSubject.create();
ms
.flattenStreamAsFlowable(v -> Stream.<Integer>of(v, v + 1, v + 2))
.subscribe(ts);
ts.assertFuseable()
.assertFusionMode(QueueFuseable.ASYNC)
.assertEmpty();
ms.onSuccess(1);
ts
.assertResult(1, 2, 3);
}
@Test
public void fused() throws Throwable {
AtomicReference<QueueSubscription<Integer>> qsr = new AtomicReference<>();
MaybeSubject<Integer> ms = MaybeSubject.create();
ms
.flattenStreamAsFlowable(Stream::of)
.subscribe(new FlowableSubscriber<Integer>() {
@Override
public void onNext(Integer t) {
}
@Override
public void onError(Throwable t) {
}
@Override
public void onComplete() {
}
@Override
@SuppressWarnings("unchecked")
public void onSubscribe(@NonNull Subscription s) {
qsr.set((QueueSubscription<Integer>)s);
}
});
QueueSubscription<Integer> qs = qsr.get();
assertEquals(QueueFuseable.ASYNC, qs.requestFusion(QueueFuseable.ASYNC));
assertTrue(qs.isEmpty());
assertNull(qs.poll());
ms.onSuccess(1);
assertFalse(qs.isEmpty());
assertEquals(1, qs.poll().intValue());
assertTrue(qs.isEmpty());
assertNull(qs.poll());
qs.cancel();
assertTrue(qs.isEmpty());
assertNull(qs.poll());
}
@Test
public void requestOneByOne() {
TestSubscriber<Integer> ts = new TestSubscriber<>();
Maybe.just(1)
.flattenStreamAsFlowable(v -> Stream.of(1, 2, 3, 4, 5))
.subscribe(new FlowableSubscriber<Integer>() {
Subscription upstream;
@Override
public void onSubscribe(@NonNull Subscription s) {
ts.onSubscribe(new BooleanSubscription());
upstream = s;
s.request(1);
}
@Override
public void onNext(Integer t) {
ts.onNext(t);
upstream.request(1);
}
@Override
public void onError(Throwable t) {
ts.onError(t);
}
@Override
public void onComplete() {
ts.onComplete();
}
});
ts.assertResult(1, 2, 3, 4, 5);
}
@Test
public void streamCloseCrash() throws Throwable {
TestHelper.withErrorTracking(errors -> {
Maybe.just(1)
.flattenStreamAsFlowable(v -> Stream.of(v).onClose(() -> { throw new TestException(); }))
.test()
.assertResult(1);
TestHelper.assertUndeliverable(errors, 0, TestException.class);
});
}
@Test
public void hasNextThrowsInDrain() {
@SuppressWarnings("unchecked")
Stream<Integer> stream = mock(Stream.class);
when(stream.iterator()).thenReturn(new Iterator<Integer>() {
int count;
@Override
public boolean hasNext() {
if (count++ > 0) {
throw new TestException();
}
return true;
}
@Override
public Integer next() {
return 1;
}
});
Maybe.just(1)
.flattenStreamAsFlowable(v -> stream)
.test()
.assertFailure(TestException.class, 1);
}
@Test
public void nextThrowsInDrain() {
@SuppressWarnings("unchecked")
Stream<Integer> stream = mock(Stream.class);
when(stream.iterator()).thenReturn(new Iterator<Integer>() {
@Override
public boolean hasNext() {
return true;
}
@Override
public Integer next() {
throw new TestException();
}
});
Maybe.just(1)
.flattenStreamAsFlowable(v -> stream)
.test()
.assertFailure(TestException.class);
}
@Test
public void cancelAfterHasNextInDrain() {
@SuppressWarnings("unchecked")
Stream<Integer> stream = mock(Stream.class);
TestSubscriber<Integer> ts = new TestSubscriber<>();
when(stream.iterator()).thenReturn(new Iterator<Integer>() {
int count;
@Override
public boolean hasNext() {
if (count++ > 0) {
ts.cancel();
}
return true;
}
@Override
public Integer next() {
return 1;
}
});
Maybe.just(1)
.flattenStreamAsFlowable(v -> stream)
.subscribeWith(ts)
.assertValuesOnly(1);
}
@Test
public void cancelAfterNextInDrain() {
@SuppressWarnings("unchecked")
Stream<Integer> stream = mock(Stream.class);
TestSubscriber<Integer> ts = new TestSubscriber<>();
when(stream.iterator()).thenReturn(new Iterator<Integer>() {
@Override
public boolean hasNext() {
return true;
}
@Override
public Integer next() {
ts.cancel();
return 1;
}
});
Maybe.just(1)
.flattenStreamAsFlowable(v -> stream)
.subscribeWith(ts)
.assertEmpty();
}
@Test
public void requestSuccessRace() {
for (int i = 0; i < TestHelper.RACE_LONG_LOOPS; i++) {
MaybeSubject<Integer> ms = MaybeSubject.create();
TestSubscriber<Integer> ts = new TestSubscriber<>(0L);
ms.flattenStreamAsFlowable(Stream::of)
.subscribe(ts);
Runnable r1 = () -> ms.onSuccess(1);
Runnable r2 = () -> ts.request(1);
TestHelper.race(r1, r2);
ts.assertResult(1);
}
}
}
| MaybeFlattenStreamAsFlowableTest |
java | spring-projects__spring-boot | core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/context/PropertyPlaceholderAutoConfigurationTests.java | {
"start": 1580,
"end": 4395
} | class ____ {
private final ApplicationContextRunner contextRunner = new ApplicationContextRunner();
@Test
void whenTheAutoConfigurationIsNotUsedThenBeanDefinitionPlaceholdersAreNotResolved() {
this.contextRunner.withPropertyValues("fruit:banana")
.withInitializer(this::definePlaceholderBean)
.run((context) -> assertThat(context.getBean(PlaceholderBean.class).fruit).isEqualTo("${fruit:apple}"));
}
@Test
void whenTheAutoConfigurationIsUsedThenBeanDefinitionPlaceholdersAreResolved() {
this.contextRunner.withPropertyValues("fruit:banana")
.withInitializer(this::definePlaceholderBean)
.withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class))
.run((context) -> assertThat(context.getBean(PlaceholderBean.class).fruit).isEqualTo("banana"));
}
@Test
void whenTheAutoConfigurationIsNotUsedThenValuePlaceholdersAreResolved() {
this.contextRunner.withPropertyValues("fruit:banana")
.withUserConfiguration(PlaceholderConfig.class)
.run((context) -> assertThat(context.getBean(PlaceholderConfig.class).fruit).isEqualTo("banana"));
}
@Test
void whenTheAutoConfigurationIsUsedThenValuePlaceholdersAreResolved() {
this.contextRunner.withPropertyValues("fruit:banana")
.withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class))
.withUserConfiguration(PlaceholderConfig.class)
.run((context) -> assertThat(context.getBean(PlaceholderConfig.class).fruit).isEqualTo("banana"));
}
@Test
void whenThereIsAUserDefinedPropertySourcesPlaceholderConfigurerThenItIsUsedForBeanDefinitionPlaceholderResolution() {
this.contextRunner.withPropertyValues("fruit:banana")
.withInitializer(this::definePlaceholderBean)
.withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class))
.withUserConfiguration(PlaceholdersOverride.class)
.run((context) -> assertThat(context.getBean(PlaceholderBean.class).fruit).isEqualTo("orange"));
}
@Test
void whenThereIsAUserDefinedPropertySourcesPlaceholderConfigurerThenItIsUsedForValuePlaceholderResolution() {
this.contextRunner.withPropertyValues("fruit:banana")
.withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class))
.withUserConfiguration(PlaceholderConfig.class, PlaceholdersOverride.class)
.run((context) -> assertThat(context.getBean(PlaceholderConfig.class).fruit).isEqualTo("orange"));
}
private void definePlaceholderBean(ConfigurableApplicationContext context) {
((BeanDefinitionRegistry) context.getBeanFactory()).registerBeanDefinition("placeholderBean",
BeanDefinitionBuilder.rootBeanDefinition(PlaceholderBean.class)
.addConstructorArgValue("${fruit:apple}")
.getBeanDefinition());
}
@Configuration(proxyBeanMethods = false)
static | PropertyPlaceholderAutoConfigurationTests |
java | micronaut-projects__micronaut-core | http-server-netty/src/main/java/io/micronaut/http/server/netty/NettyResponseLifecycle.java | {
"start": 4146,
"end": 5612
} | class ____ extends ConcatenatingSubscriber implements BufferConsumer {
static final Separators JSON_NETTY = Separators.jsonSeparators(NettyReadBufferFactory.of(ByteBufAllocator.DEFAULT));
private final EventLoopFlow flow;
NettyConcatenatingSubscriber(NettyByteBodyFactory byteBodyFactory, Separators separators) {
super(byteBodyFactory, separators);
this.flow = new EventLoopFlow(((StreamingNettyByteBody.SharedBuffer) sharedBuffer).eventLoop());
}
static CloseableByteBody concatenate(NettyByteBodyFactory byteBodyFactory, Separators separators, Publisher<ByteBody> publisher) {
NettyConcatenatingSubscriber subscriber = new NettyConcatenatingSubscriber(byteBodyFactory, separators);
publisher.subscribe(subscriber);
return subscriber.rootBody;
}
@Override
public void add(@NonNull ReadBuffer buffer) {
if (flow.executeNow(() -> super.add(buffer))) {
super.add(buffer);
}
}
@Override
protected void forwardComplete() {
if (flow.executeNow(super::forwardComplete)) {
super.forwardComplete();
}
}
@Override
protected void forwardError(Throwable t) {
if (flow.executeNow(() -> super.forwardError(t))) {
super.forwardError(t);
}
}
}
}
| NettyConcatenatingSubscriber |
java | google__truth | extensions/proto/src/main/java/com/google/common/truth/extensions/proto/MapWithProtoValuesSubject.java | {
"start": 2681,
"end": 28166
} | class ____<M extends Message> extends MapSubject {
/*
* Storing a FailureMetadata instance in a Subject subclass is generally a bad practice. For an
* explanation of why it works out OK here, see LiteProtoSubject.
*/
private final FailureMetadata metadata;
private final Map<?, M> actual;
private final FluentEqualityConfig config;
protected MapWithProtoValuesSubject(FailureMetadata failureMetadata, @Nullable Map<?, M> map) {
this(failureMetadata, FluentEqualityConfig.defaultInstance(), map);
}
MapWithProtoValuesSubject(
FailureMetadata failureMetadata, FluentEqualityConfig config, @Nullable Map<?, M> map) {
super(failureMetadata, map);
this.metadata = failureMetadata;
this.actual = map;
this.config = config;
}
//////////////////////////////////////////////////////////////////////////////////////////////////
// MapWithProtoValuesFluentAssertion Configuration
//////////////////////////////////////////////////////////////////////////////////////////////////
MapWithProtoValuesFluentAssertion<M> usingConfig(FluentEqualityConfig newConfig) {
return new MapWithProtoValuesFluentAssertionImpl<>(
new MapWithProtoValuesSubject<>(metadata, newConfig, actual));
}
/**
* Specifies that the 'has' bit of individual fields should be ignored when comparing for
* equality.
*
* <p>For version 2 Protocol Buffers, this setting determines whether two protos with the same
* value for a field compare equal if one explicitly sets the value, and the other merely
* implicitly uses the schema-defined default. This setting also determines whether unknown fields
* should be considered in the comparison. By {@code ignoringFieldAbsence()}, unknown fields are
* ignored, and value-equal fields as specified above are considered equal.
*
* <p>For version 3 Protocol Buffers, this setting does not affect primitive fields, because their
* default value is indistinguishable from unset.
*/
public MapWithProtoValuesFluentAssertion<M> ignoringFieldAbsenceForValues() {
return usingConfig(config.ignoringFieldAbsence());
}
/**
* Specifies that the 'has' bit of these explicitly specified top-level field numbers should be
* ignored when comparing for equality. Sub-fields must be specified explicitly (via {@link
* FieldDescriptor}) if they are to be ignored as well.
*
* <p>Use {@link #ignoringFieldAbsenceForValues()} instead to ignore the 'has' bit for all fields.
*
* @see #ignoringFieldAbsenceForValues() for details
*/
public MapWithProtoValuesFluentAssertion<M> ignoringFieldAbsenceOfFieldsForValues(
int firstFieldNumber, int... rest) {
return usingConfig(config.ignoringFieldAbsenceOfFields(asList(firstFieldNumber, rest)));
}
/**
* Specifies that the 'has' bit of these explicitly specified top-level field numbers should be
* ignored when comparing for equality. Sub-fields must be specified explicitly (via {@link
* FieldDescriptor}) if they are to be ignored as well.
*
* <p>Use {@link #ignoringFieldAbsenceForValues()} instead to ignore the 'has' bit for all fields.
*
* @see #ignoringFieldAbsenceForValues() for details
*/
public MapWithProtoValuesFluentAssertion<M> ignoringFieldAbsenceOfFieldsForValues(
    Iterable<Integer> fieldNumbers) {
  // Apply the per-field-number setting and rewrap the resulting config.
  FluentEqualityConfig newConfig = config.ignoringFieldAbsenceOfFields(fieldNumbers);
  return usingConfig(newConfig);
}
/**
* Specifies that the 'has' bit of these explicitly specified field descriptors should be ignored
* when comparing for equality. Sub-fields must be specified explicitly if they are to be ignored
* as well.
*
* <p>Use {@link #ignoringFieldAbsenceForValues()} instead to ignore the 'has' bit for all fields.
*
* @see #ignoringFieldAbsenceForValues() for details
*/
public MapWithProtoValuesFluentAssertion<M> ignoringFieldAbsenceOfFieldDescriptorsForValues(
    FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) {
  // Collect the varargs into a single iterable before updating the config.
  Iterable<FieldDescriptor> fieldDescriptors = asList(firstFieldDescriptor, rest);
  return usingConfig(config.ignoringFieldAbsenceOfFieldDescriptors(fieldDescriptors));
}
/**
* Specifies that the 'has' bit of these explicitly specified field descriptors should be ignored
* when comparing for equality. Sub-fields must be specified explicitly if they are to be ignored
* as well.
*
* <p>Use {@link #ignoringFieldAbsenceForValues()} instead to ignore the 'has' bit for all fields.
*
* @see #ignoringFieldAbsenceForValues() for details
*/
public MapWithProtoValuesFluentAssertion<M> ignoringFieldAbsenceOfFieldDescriptorsForValues(
    Iterable<FieldDescriptor> fieldDescriptors) {
  // Apply the per-descriptor setting and rewrap the resulting config.
  FluentEqualityConfig newConfig = config.ignoringFieldAbsenceOfFieldDescriptors(fieldDescriptors);
  return usingConfig(newConfig);
}
/**
* Specifies that the ordering of repeated fields, at all levels, should be ignored when comparing
* for equality.
*
* <p>This setting applies to all repeated fields recursively, but it does not ignore structure.
* For example, with {@link #ignoringRepeatedFieldOrderForValues()}, a repeated {@code int32}
* field {@code bar}, set inside a repeated message field {@code foo}, the following protos will
* all compare equal:
*
* <pre>{@code
* message1: {
* foo: {
* bar: 1
* bar: 2
* }
* foo: {
* bar: 3
* bar: 4
* }
* }
*
* message2: {
* foo: {
* bar: 2
* bar: 1
* }
* foo: {
* bar: 4
* bar: 3
* }
* }
*
* message3: {
* foo: {
* bar: 4
* bar: 3
* }
* foo: {
* bar: 2
* bar: 1
* }
* }
* }</pre>
*
* <p>However, the following message will compare equal to none of these:
*
* <pre>{@code
* message4: {
* foo: {
* bar: 1
* bar: 3
* }
* foo: {
* bar: 2
* bar: 4
* }
* }
* }</pre>
*
* <p>This setting does not apply to map fields, for which field order is always ignored. The
* serialization order of map fields is undefined, and it may change from runtime to runtime.
*/
public MapWithProtoValuesFluentAssertion<M> ignoringRepeatedFieldOrderForValues() {
  // Derive a config that ignores repeated-field order everywhere, then rewrap it.
  FluentEqualityConfig newConfig = config.ignoringRepeatedFieldOrder();
  return usingConfig(newConfig);
}
/**
* Specifies that the ordering of repeated fields for these explicitly specified top-level field
* numbers should be ignored when comparing for equality. Sub-fields must be specified explicitly
* (via {@link FieldDescriptor}) if their orders are to be ignored as well.
*
* <p>Use {@link #ignoringRepeatedFieldOrderForValues()} instead to ignore order for all fields.
*
* @see #ignoringRepeatedFieldOrderForValues() for details.
*/
public MapWithProtoValuesFluentAssertion<M> ignoringRepeatedFieldOrderOfFieldsForValues(
    int firstFieldNumber, int... rest) {
  // Collect the varargs into a single iterable before updating the config.
  Iterable<Integer> fieldNumbers = asList(firstFieldNumber, rest);
  return usingConfig(config.ignoringRepeatedFieldOrderOfFields(fieldNumbers));
}
/**
* Specifies that the ordering of repeated fields for these explicitly specified top-level field
* numbers should be ignored when comparing for equality. Sub-fields must be specified explicitly
* (via {@link FieldDescriptor}) if their orders are to be ignored as well.
*
* <p>Use {@link #ignoringRepeatedFieldOrderForValues()} instead to ignore order for all fields.
*
* @see #ignoringRepeatedFieldOrderForValues() for details.
*/
public MapWithProtoValuesFluentAssertion<M> ignoringRepeatedFieldOrderOfFieldsForValues(
    Iterable<Integer> fieldNumbers) {
  // Apply the per-field-number setting and rewrap the resulting config.
  FluentEqualityConfig newConfig = config.ignoringRepeatedFieldOrderOfFields(fieldNumbers);
  return usingConfig(newConfig);
}
/**
* Specifies that the ordering of repeated fields for these explicitly specified field descriptors
* should be ignored when comparing for equality. Sub-fields must be specified explicitly if their
* orders are to be ignored as well.
*
* <p>Use {@link #ignoringRepeatedFieldOrderForValues()} instead to ignore order for all fields.
*
* @see #ignoringRepeatedFieldOrderForValues() for details.
*/
public MapWithProtoValuesFluentAssertion<M> ignoringRepeatedFieldOrderOfFieldDescriptorsForValues(
    FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) {
  // Collect the varargs into a single iterable before updating the config.
  Iterable<FieldDescriptor> fieldDescriptors = asList(firstFieldDescriptor, rest);
  return usingConfig(config.ignoringRepeatedFieldOrderOfFieldDescriptors(fieldDescriptors));
}
/**
* Specifies that the ordering of repeated fields for these explicitly specified field descriptors
* should be ignored when comparing for equality. Sub-fields must be specified explicitly if their
* orders are to be ignored as well.
*
* <p>Use {@link #ignoringRepeatedFieldOrderForValues()} instead to ignore order for all fields.
*
* @see #ignoringRepeatedFieldOrderForValues() for details.
*/
public MapWithProtoValuesFluentAssertion<M> ignoringRepeatedFieldOrderOfFieldDescriptorsForValues(
    Iterable<FieldDescriptor> fieldDescriptors) {
  // Apply the per-descriptor setting and rewrap the resulting config.
  FluentEqualityConfig newConfig =
      config.ignoringRepeatedFieldOrderOfFieldDescriptors(fieldDescriptors);
  return usingConfig(newConfig);
}
/**
* Specifies that, for all repeated and map fields, any elements in the 'actual' proto which are
* not found in the 'expected' proto are ignored, with the exception of fields in the expected
* proto which are empty. To ignore empty repeated fields as well, use {@link
* #comparingExpectedFieldsOnlyForValues}.
*
* <p>This rule is applied independently from {@link #ignoringRepeatedFieldOrderForValues}. If
* ignoring repeated field order AND extra repeated field elements, all that is tested is that the
* expected elements comprise a subset of the actual elements. If not ignoring repeated field
* order, but still ignoring extra repeated field elements, the actual elements must contain a
* subsequence that matches the expected elements for the test to pass. (The subsequence rule does
* not apply to Map fields, which are always compared by key.)
*/
public MapWithProtoValuesFluentAssertion<M> ignoringExtraRepeatedFieldElementsForValues() {
  // Derive a config that ignores extra repeated-field elements, then rewrap it.
  FluentEqualityConfig newConfig = config.ignoringExtraRepeatedFieldElements();
  return usingConfig(newConfig);
}
/**
* Specifies that extra repeated field elements for these explicitly specified top-level field
* numbers should be ignored. Sub-fields must be specified explicitly (via {@link
* FieldDescriptor}) if their extra elements are to be ignored as well.
*
* <p>Use {@link #ignoringExtraRepeatedFieldElementsForValues()} instead to ignore these for all
* fields.
*
* @see #ignoringExtraRepeatedFieldElementsForValues() for details.
*/
public MapWithProtoValuesFluentAssertion<M> ignoringExtraRepeatedFieldElementsOfFieldsForValues(
    int firstFieldNumber, int... rest) {
  // Collect the varargs into a single iterable before updating the config.
  Iterable<Integer> fieldNumbers = asList(firstFieldNumber, rest);
  return usingConfig(config.ignoringExtraRepeatedFieldElementsOfFields(fieldNumbers));
}
/**
* Specifies that extra repeated field elements for these explicitly specified top-level field
* numbers should be ignored. Sub-fields must be specified explicitly (via {@link
* FieldDescriptor}) if their extra elements are to be ignored as well.
*
* <p>Use {@link #ignoringExtraRepeatedFieldElementsForValues()} instead to ignore these for all
* fields.
*
* @see #ignoringExtraRepeatedFieldElementsForValues() for details.
*/
public MapWithProtoValuesFluentAssertion<M> ignoringExtraRepeatedFieldElementsOfFieldsForValues(
    Iterable<Integer> fieldNumbers) {
  // Apply the per-field-number setting and rewrap the resulting config.
  FluentEqualityConfig newConfig = config.ignoringExtraRepeatedFieldElementsOfFields(fieldNumbers);
  return usingConfig(newConfig);
}
/**
* Specifies that extra repeated field elements for these explicitly specified field descriptors
* should be ignored. Sub-fields must be specified explicitly if their extra elements are to be
* ignored as well.
*
* <p>Use {@link #ignoringExtraRepeatedFieldElementsForValues()} instead to ignore these for all
* fields.
*
* @see #ignoringExtraRepeatedFieldElementsForValues() for details.
*/
public MapWithProtoValuesFluentAssertion<M>
    ignoringExtraRepeatedFieldElementsOfFieldDescriptorsForValues(
        FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) {
  // Collect the varargs into a single iterable before updating the config.
  Iterable<FieldDescriptor> fieldDescriptors = asList(firstFieldDescriptor, rest);
  return usingConfig(config.ignoringExtraRepeatedFieldElementsOfFieldDescriptors(fieldDescriptors));
}
/**
* Specifies that extra repeated field elements for these explicitly specified field descriptors
* should be ignored. Sub-fields must be specified explicitly if their extra elements are to be
* ignored as well.
*
* <p>Use {@link #ignoringExtraRepeatedFieldElementsForValues()} instead to ignore these for all
* fields.
*
* @see #ignoringExtraRepeatedFieldElementsForValues() for details.
*/
public MapWithProtoValuesFluentAssertion<M>
    ignoringExtraRepeatedFieldElementsOfFieldDescriptorsForValues(
        Iterable<FieldDescriptor> fieldDescriptors) {
  // Apply the per-descriptor setting and rewrap the resulting config.
  FluentEqualityConfig newConfig =
      config.ignoringExtraRepeatedFieldElementsOfFieldDescriptors(fieldDescriptors);
  return usingConfig(newConfig);
}
/**
* Compares double fields as equal if they are both finite and their absolute difference is less
* than or equal to {@code tolerance}.
*
* @param tolerance A finite, non-negative tolerance.
*/
public MapWithProtoValuesFluentAssertion<M> usingDoubleToleranceForValues(double tolerance) {
  // Apply the global double tolerance and rewrap the resulting config.
  FluentEqualityConfig newConfig = config.usingDoubleTolerance(tolerance);
  return usingConfig(newConfig);
}
/**
* Compares double fields with these explicitly specified top-level field numbers using the
* provided absolute tolerance.
*
* @param tolerance A finite, non-negative tolerance.
*/
public MapWithProtoValuesFluentAssertion<M> usingDoubleToleranceForFieldsForValues(
    double tolerance, int firstFieldNumber, int... rest) {
  // Collect the varargs into a single iterable before updating the config.
  Iterable<Integer> fieldNumbers = asList(firstFieldNumber, rest);
  return usingConfig(config.usingDoubleToleranceForFields(tolerance, fieldNumbers));
}
/**
* Compares double fields with these explicitly specified top-level field numbers using the
* provided absolute tolerance.
*
* @param tolerance A finite, non-negative tolerance.
*/
public MapWithProtoValuesFluentAssertion<M> usingDoubleToleranceForFieldsForValues(
    double tolerance, Iterable<Integer> fieldNumbers) {
  // Apply the per-field-number tolerance and rewrap the resulting config.
  FluentEqualityConfig newConfig = config.usingDoubleToleranceForFields(tolerance, fieldNumbers);
  return usingConfig(newConfig);
}
/**
* Compares double fields with these explicitly specified fields using the provided absolute
* tolerance.
*
* @param tolerance A finite, non-negative tolerance.
*/
public MapWithProtoValuesFluentAssertion<M> usingDoubleToleranceForFieldDescriptorsForValues(
    double tolerance, FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) {
  // Collect the varargs into a single iterable before updating the config.
  Iterable<FieldDescriptor> fieldDescriptors = asList(firstFieldDescriptor, rest);
  return usingConfig(config.usingDoubleToleranceForFieldDescriptors(tolerance, fieldDescriptors));
}
/**
* Compares double fields with these explicitly specified fields using the provided absolute
* tolerance.
*
* @param tolerance A finite, non-negative tolerance.
*/
public MapWithProtoValuesFluentAssertion<M> usingDoubleToleranceForFieldDescriptorsForValues(
    double tolerance, Iterable<FieldDescriptor> fieldDescriptors) {
  // Apply the per-descriptor tolerance and rewrap the resulting config.
  FluentEqualityConfig newConfig =
      config.usingDoubleToleranceForFieldDescriptors(tolerance, fieldDescriptors);
  return usingConfig(newConfig);
}
/**
* Compares float fields as equal if they are both finite and their absolute difference is less
* than or equal to {@code tolerance}.
*
* @param tolerance A finite, non-negative tolerance.
*/
public MapWithProtoValuesFluentAssertion<M> usingFloatToleranceForValues(float tolerance) {
  // Apply the global float tolerance and rewrap the resulting config.
  FluentEqualityConfig newConfig = config.usingFloatTolerance(tolerance);
  return usingConfig(newConfig);
}
/**
* Compares float fields with these explicitly specified top-level field numbers using the
* provided absolute tolerance.
*
* @param tolerance A finite, non-negative tolerance.
*/
public MapWithProtoValuesFluentAssertion<M> usingFloatToleranceForFieldsForValues(
    float tolerance, int firstFieldNumber, int... rest) {
  // Collect the varargs into a single iterable before updating the config.
  Iterable<Integer> fieldNumbers = asList(firstFieldNumber, rest);
  return usingConfig(config.usingFloatToleranceForFields(tolerance, fieldNumbers));
}
/**
* Compares float fields with these explicitly specified top-level field numbers using the
* provided absolute tolerance.
*
* @param tolerance A finite, non-negative tolerance.
*/
public MapWithProtoValuesFluentAssertion<M> usingFloatToleranceForFieldsForValues(
    float tolerance, Iterable<Integer> fieldNumbers) {
  // Apply the per-field-number tolerance and rewrap the resulting config.
  FluentEqualityConfig newConfig = config.usingFloatToleranceForFields(tolerance, fieldNumbers);
  return usingConfig(newConfig);
}
/**
* Compares float fields with these explicitly specified fields using the provided absolute
* tolerance.
*
* @param tolerance A finite, non-negative tolerance.
*/
public MapWithProtoValuesFluentAssertion<M> usingFloatToleranceForFieldDescriptorsForValues(
    float tolerance, FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) {
  // Collect the varargs into a single iterable before updating the config.
  Iterable<FieldDescriptor> fieldDescriptors = asList(firstFieldDescriptor, rest);
  return usingConfig(config.usingFloatToleranceForFieldDescriptors(tolerance, fieldDescriptors));
}
/**
* Compares float fields with these explicitly specified fields using the provided absolute
* tolerance.
*
* @param tolerance A finite, non-negative tolerance.
*/
public MapWithProtoValuesFluentAssertion<M> usingFloatToleranceForFieldDescriptorsForValues(
    float tolerance, Iterable<FieldDescriptor> fieldDescriptors) {
  // Apply the per-descriptor tolerance and rewrap the resulting config.
  FluentEqualityConfig newConfig =
      config.usingFloatToleranceForFieldDescriptors(tolerance, fieldDescriptors);
  return usingConfig(newConfig);
}
/**
* Limits the comparison of Protocol buffers to the fields set in the expected proto(s). When
* multiple protos are specified, the comparison is limited to the union of set fields in all the
* expected protos.
*
* <p>The "expected proto(s)" are those passed to the method at the end of the call chain, such as
* {@link #containsEntry} or {@link #containsExactlyEntriesIn}.
*
* <p>Fields not set in the expected proto(s) are ignored. In particular, proto3 fields which have
* their default values are ignored, as these are indistinguishable from unset fields. If you want
* to assert that a proto3 message has certain fields with default values, you cannot use this
* method.
*/
public MapWithProtoValuesFluentAssertion<M> comparingExpectedFieldsOnlyForValues() {
  // Derive a config restricted to the fields set in the expected proto(s), then rewrap it.
  FluentEqualityConfig newConfig = config.comparingExpectedFieldsOnly();
  return usingConfig(newConfig);
}
/**
* Limits the comparison of Protocol buffers to the defined {@link FieldScope}.
*
* <p>This method is additive and has well-defined ordering semantics. If the invoking {@link
* ProtoFluentAssertion} is already scoped to a {@link FieldScope} {@code X}, and this method is
* invoked with {@link FieldScope} {@code Y}, the resultant {@link ProtoFluentAssertion} is
* constrained to the intersection of {@link FieldScope}s {@code X} and {@code Y}.
*
* <p>By default, {@link MapWithProtoValuesFluentAssertion} is constrained to {@link
* FieldScopes#all()}, that is, no fields are excluded from comparison.
*/
public MapWithProtoValuesFluentAssertion<M> withPartialScopeForValues(FieldScope fieldScope) {
  // Fail fast on null before intersecting with any previously-applied scope.
  FieldScope scope = checkNotNull(fieldScope, "fieldScope");
  return usingConfig(config.withPartialScope(scope));
}
/**
* Excludes the top-level message fields with the given tag numbers from the comparison.
*
* <p>This method adds on any previous {@link FieldScope} related settings, overriding previous
* changes to ensure the specified fields are ignored recursively. All sub-fields of these field
* numbers are ignored, and all sub-messages of type {@code M} will also have these field numbers
* ignored.
*
* <p>If an invalid field number is supplied, the terminal comparison operation will throw a
* runtime exception.
*/
public MapWithProtoValuesFluentAssertion<M> ignoringFieldsForValues(
    int firstFieldNumber, int... rest) {
  // Normalize the varargs into an iterable and reuse the Iterable overload.
  Iterable<Integer> fieldNumbers = asList(firstFieldNumber, rest);
  return ignoringFieldsForValues(fieldNumbers);
}
/**
* Excludes the top-level message fields with the given tag numbers from the comparison.
*
* <p>This method adds on any previous {@link FieldScope} related settings, overriding previous
* changes to ensure the specified fields are ignored recursively. All sub-fields of these field
* numbers are ignored, and all sub-messages of type {@code M} will also have these field numbers
* ignored.
*
* <p>If an invalid field number is supplied, the terminal comparison operation will throw a
* runtime exception.
*/
public MapWithProtoValuesFluentAssertion<M> ignoringFieldsForValues(
    Iterable<Integer> fieldNumbers) {
  // Apply the recursive field-number exclusion and rewrap the resulting config.
  FluentEqualityConfig newConfig = config.ignoringFields(fieldNumbers);
  return usingConfig(newConfig);
}
/**
* Excludes all message fields matching the given {@link FieldDescriptor}s from the comparison.
*
* <p>This method adds on any previous {@link FieldScope} related settings, overriding previous
* changes to ensure the specified fields are ignored recursively. All sub-fields of these field
* descriptors are ignored, no matter where they occur in the tree.
*
* <p>If a field descriptor which does not, or cannot occur in the proto structure is supplied, it
* is silently ignored.
*/
public MapWithProtoValuesFluentAssertion<M> ignoringFieldDescriptorsForValues(
    FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) {
  // Normalize the varargs into an iterable and reuse the Iterable overload.
  Iterable<FieldDescriptor> fieldDescriptors = asList(firstFieldDescriptor, rest);
  return ignoringFieldDescriptorsForValues(fieldDescriptors);
}
/**
* Excludes all message fields matching the given {@link FieldDescriptor}s from the comparison.
*
* <p>This method adds on any previous {@link FieldScope} related settings, overriding previous
* changes to ensure the specified fields are ignored recursively. All sub-fields of these field
* descriptors are ignored, no matter where they occur in the tree.
*
* <p>If a field descriptor which does not, or cannot occur in the proto structure is supplied, it
* is silently ignored.
*/
public MapWithProtoValuesFluentAssertion<M> ignoringFieldDescriptorsForValues(
    Iterable<FieldDescriptor> fieldDescriptors) {
  // Apply the recursive descriptor exclusion and rewrap the resulting config.
  FluentEqualityConfig newConfig = config.ignoringFieldDescriptors(fieldDescriptors);
  return usingConfig(newConfig);
}
/**
* Excludes all specific field paths under the argument {@link FieldScope} from the comparison.
*
* <p>This method is additive and has well-defined ordering semantics. If the invoking {@link
* ProtoFluentAssertion} is already scoped to a {@link FieldScope} {@code X}, and this method is
* invoked with {@link FieldScope} {@code Y}, the resultant {@link ProtoFluentAssertion} is
* constrained to the subtraction of {@code X - Y}.
*
* <p>By default, {@link ProtoFluentAssertion} is constrained to {@link FieldScopes#all()}, that
* is, no fields are excluded from comparison.
*/
public MapWithProtoValuesFluentAssertion<M> ignoringFieldScopeForValues(FieldScope fieldScope) {
  // Fail fast on null before subtracting the scope from the comparison.
  FieldScope scope = checkNotNull(fieldScope, "fieldScope");
  return usingConfig(config.ignoringFieldScope(scope));
}
/**
* If set, in the event of a comparison failure, the error message printed will list only those
* specific fields that did not match between the actual and expected values. Useful for very
* large protocol buffers.
*
* <p>This a purely cosmetic setting, and it has no effect on the behavior of the test.
*/
public MapWithProtoValuesFluentAssertion<M> reportingMismatchesOnlyForValues() {
  // Cosmetic-only setting: derive the mismatch-only config and rewrap it.
  FluentEqualityConfig newConfig = config.reportingMismatchesOnly();
  return usingConfig(newConfig);
}
/**
* Specifies the {@link TypeRegistry} and {@link ExtensionRegistry} to use for {@link
* com.google.protobuf.Any Any} messages.
*
* <p>To compare the value of an {@code Any} message, ProtoTruth looks in the given type registry
* for a descriptor for the message's type URL:
*
* <ul>
* <li>If ProtoTruth finds a descriptor, it unpacks the value and compares it against the
* expected value, respecting any configuration methods used for the assertion.
* <li>If ProtoTruth does not find a descriptor (or if the value can't be deserialized with the
* descriptor), it compares the raw, serialized bytes of the expected and actual values.
* </ul>
*
* <p>When ProtoTruth unpacks a value, it is parsing a serialized proto. That proto may contain
* extensions. To look up those extensions, ProtoTruth uses the provided {@link
* ExtensionRegistry}.
*
* @since 1.1
*/
public MapWithProtoValuesFluentAssertion<M> unpackingAnyUsingForValues(
    TypeRegistry typeRegistry, ExtensionRegistry extensionRegistry) {
  // Record the registries used to resolve and parse Any payloads, then rewrap the config.
  FluentEqualityConfig newConfig = config.unpackingAnyUsing(typeRegistry, extensionRegistry);
  return usingConfig(newConfig);
}
//////////////////////////////////////////////////////////////////////////////////////////////////
// UsingCorrespondence Methods
//////////////////////////////////////////////////////////////////////////////////////////////////
private MapSubject.UsingCorrespondence<M, M> usingCorrespondence(
    Iterable<? extends M> expectedValues) {
  // Capture the expected messages on the config, then turn the config into a
  // Correspondence keyed off the (single) descriptor of the actual map's values.
  FluentEqualityConfig configWithExpected = config.withExpectedMessages(expectedValues);
  return comparingValuesUsing(
      configWithExpected.<M>toCorrespondence(FieldScopeUtil.getSingleDescriptor(actual.values())));
}
// The UsingCorrespondence methods have conflicting erasure with default MapSubject methods,
// so we can't implement them both on the same class, but we want to define both so
// MapWithProtoValuesSubjects are interchangeable with MapSubjects when no configuration is
// specified. So, we implement a dumb, private delegator to return instead.
private static final | MapWithProtoValuesSubject |
java | junit-team__junit5 | platform-tests/src/test/java/org/junit/platform/engine/discovery/ClassNameFilterTests.java | {
"start": 2568,
"end": 4352
} | class ____ that match one of the following regular expressions: '"
+ firstRegex + "' OR '" + secondRegex + "'");
assertTrue(filter.apply("java.lang.String").included());
assertTrue(filter.toPredicate().test("java.lang.String"));
assertThat(filter.apply("java.lang.String").getReason()).contains(
"Class name [java.lang.String] matches included pattern: '" + firstRegex + "'");
assertTrue(filter.apply("java.util.Collection").included());
assertTrue(filter.toPredicate().test("java.util.Collection"));
assertThat(filter.apply("java.util.Collection").getReason()).contains(
"Class name [java.util.Collection] matches included pattern: '" + secondRegex + "'");
assertFalse(filter.apply("java.time.Instant").included());
assertFalse(filter.toPredicate().test("java.time.Instant"));
assertThat(filter.apply("java.time.Instant").getReason()).contains(
"Class name [java.time.Instant] does not match any included pattern: '" + firstRegex + "' OR '"
+ secondRegex + "'");
}
@SuppressWarnings("DataFlowIssue")
@Test
void excludeClassNamePatternsChecksPreconditions() {
assertPreconditionViolationNotNullOrEmptyFor("patterns array",
() -> ClassNameFilter.excludeClassNamePatterns((String[]) null));
assertPreconditionViolationNotNullOrEmptyFor("patterns array",
() -> ClassNameFilter.excludeClassNamePatterns(new String[0]));
assertPreconditionViolationFor(() -> ClassNameFilter.excludeClassNamePatterns(new String[] { null }))//
.withMessage("patterns array must not contain null elements");
}
@Test
void excludeClassNamePatternsWithSinglePattern() {
var regex = "^java\\.lang\\..*";
var filter = ClassNameFilter.excludeClassNamePatterns(regex);
assertThat(filter).hasToString(
"ExcludeClassNameFilter that excludes | names |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/SplitterNullBodyParallelTest.java | {
"start": 961,
"end": 1491
} | class ____ extends SplitterNullBodyTest {
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").split(body()).parallelProcessing().to("mock:split").end().to("mock:result");
from("direct:streaming").split(body()).streaming().parallelProcessing().to("mock:split").end()
.to("mock:result");
}
};
}
}
| SplitterNullBodyParallelTest |
java | apache__dubbo | dubbo-cluster/src/main/java/org/apache/dubbo/rpc/cluster/router/mesh/route/StandardMeshRuleRouterFactory.java | {
"start": 1108,
"end": 1324
} | class ____ implements StateRouterFactory {
@Override
public <T> StateRouter<T> getRouter(Class<T> interfaceClass, URL url) {
return new StandardMeshRuleRouter<>(url);
}
}
| StandardMeshRuleRouterFactory |
java | square__retrofit | retrofit/src/main/java/retrofit2/ParameterHandler.java | {
"start": 13750,
"end": 14535
} | class ____<T> extends ParameterHandler<T> {
private final Method method;
private final int p;
private final Converter<T, RequestBody> converter;
Body(Method method, int p, Converter<T, RequestBody> converter) {
this.method = method;
this.p = p;
this.converter = converter;
}
@Override
void apply(RequestBuilder builder, @Nullable T value) {
if (value == null) {
throw Utils.parameterError(method, p, "Body parameter value must not be null.");
}
RequestBody body;
try {
body = converter.convert(value);
} catch (IOException e) {
throw Utils.parameterError(method, e, p, "Unable to convert " + value + " to RequestBody");
}
builder.setBody(body);
}
}
static final | Body |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/function/xml/XmlExistsFunction.java | {
"start": 1299,
"end": 2561
} | class ____ extends AbstractSqmSelfRenderingFunctionDescriptor {
public XmlExistsFunction(TypeConfiguration typeConfiguration) {
super(
"xmlexists",
FunctionKind.NORMAL,
StandardArgumentsValidators.composite(
new ArgumentTypesValidator( null, STRING, IMPLICIT_XML )
),
StandardFunctionReturnTypeResolvers.invariant(
typeConfiguration.getBasicTypeRegistry().getRegisteredType( Boolean.class )
),
StandardFunctionArgumentTypeResolvers.invariant( typeConfiguration, STRING, XML )
);
}
@Override
public boolean isPredicate() {
return true;
}
@Override
public void render(
SqlAppender sqlAppender,
List<? extends SqlAstNode> sqlAstArguments,
ReturnableType<?> returnType,
SqlAstTranslator<?> walker) {
final Expression xmlDocument = (Expression) sqlAstArguments.get( 1 );
final boolean needsCast = !ExpressionTypeHelper.isXml( xmlDocument );
sqlAppender.appendSql( "xmlexists(" );
sqlAstArguments.get( 0 ).accept( walker );
sqlAppender.appendSql( " passing " );
if ( needsCast ) {
sqlAppender.appendSql( "xmlparse(document " );
}
sqlAstArguments.get( 1 ).accept( walker );
if ( needsCast ) {
sqlAppender.appendSql( ')' );
}
sqlAppender.appendSql( ')' );
}
}
| XmlExistsFunction |
java | alibaba__nacos | naming/src/test/java/com/alibaba/nacos/naming/paramcheck/ParamExtractorTest.java | {
"start": 1761,
"end": 5506
} | class ____ {
@Mock
private ControllerMethodsCache methodsCache;
private ParamCheckerFilter filter;
@Test
void testBatchUpdateFilter() throws Exception {
AbstractHttpParamExtractor httpExtractor = testExtractor(methodsCache, InstanceController.class, "batchUpdateInstanceMetadata",
HttpServletRequest.class);
assertEquals(NamingInstanceMetadataBatchHttpParamExtractor.class, httpExtractor.getClass());
Mockito.verify(httpExtractor, new Times(1)).extractParam(Mockito.any());
}
@Test
void testListFilter() throws Exception {
AbstractHttpParamExtractor httpExtractor = testExtractor(methodsCache, InstanceController.class, "list", HttpServletRequest.class);
assertEquals(NamingInstanceListHttpParamExtractor.class, httpExtractor.getClass());
Mockito.verify(httpExtractor, new Times(1)).extractParam(Mockito.any());
}
@Test
void testMetadataFilter() throws Exception {
AbstractHttpParamExtractor httpExtractor = testExtractor(methodsCache, InstanceController.class, "batchDeleteInstanceMetadata",
HttpServletRequest.class);
assertEquals(NamingInstanceMetadataBatchHttpParamExtractor.class, httpExtractor.getClass());
Mockito.verify(httpExtractor, new Times(1)).extractParam(Mockito.any());
}
@Test
void testDefaultFilter() throws Exception {
AbstractHttpParamExtractor httpExtractor = testExtractor(methodsCache, InstanceController.class, "register",
HttpServletRequest.class);
assertEquals(NamingDefaultHttpParamExtractor.class, httpExtractor.getClass());
Mockito.verify(httpExtractor, new Times(1)).extractParam(Mockito.any());
}
/**
* Create mock method about AbstractHttpParamExtractor to verify.
*
* @param methodsCache methodsCache
* @param clazz clazz
* @param methodName methodName
* @param parameterTypes parameterTypes
* @return AbstractHttpParamExtractor
*/
public AbstractHttpParamExtractor testExtractor(ControllerMethodsCache methodsCache, Class<?> clazz, String methodName,
Class<?>... parameterTypes) throws NoSuchMethodException, ServletException, IOException {
MockedStatic<EnvUtil> mockedStatic = Mockito.mockStatic(EnvUtil.class);
final Method check = clazz.getMethod(methodName, parameterTypes);
ExtractorManager.Extractor annotation = check.getAnnotation(ExtractorManager.Extractor.class);
if (annotation == null) {
annotation = clazz.getAnnotation(ExtractorManager.Extractor.class);
}
AbstractHttpParamExtractor httpExtractor = Mockito.spy(ExtractorManager.getHttpExtractor(annotation));
MockedStatic<ExtractorManager> managerMockedStatic = Mockito.mockStatic(ExtractorManager.class);
mockedStatic.when(() -> EnvUtil.getProperty(Mockito.any(), Mockito.any(), Mockito.any())).thenAnswer((k) -> k.getArgument(2));
ParamCheckerFilter filter = new ParamCheckerFilter(methodsCache);
ExtractorManager.Extractor finalAnnotation = annotation;
managerMockedStatic.when(() -> ExtractorManager.getHttpExtractor(finalAnnotation)).thenReturn(httpExtractor);
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
Mockito.when(methodsCache.getMethod(request)).thenReturn(check);
filter.doFilter(request, response, (servletRequest, servletResponse) -> {
});
managerMockedStatic.close();
mockedStatic.close();
return httpExtractor;
}
}
| ParamExtractorTest |
java | elastic__elasticsearch | test/framework/src/test/java/org/elasticsearch/test/test/LoggingListenerTests.java | {
"start": 1226,
"end": 12664
} | class ____ extends ESTestCase {
public void testTestRunStartedSupportsClassInDefaultPackage() throws Exception {
LoggingListener loggingListener = new LoggingListener();
Description description = Description.createTestDescription(Class.forName("Dummy"), "dummy");
// Will throw an exception without the check for testClassPackage != null in testRunStarted
loggingListener.testRunStarted(description);
}
public void testCustomLevelPerMethod() throws Exception {
runTestCustomLevelPerMethod(TestClass.class);
}
public void testIssueCustomLevelPerMethod() throws Exception {
runTestCustomLevelPerMethod(TestIssueClass.class);
}
public void testMixedCustomLevelPerMethod() throws Exception {
runTestCustomLevelPerMethod(TestMixedClass.class);
}
private void runTestCustomLevelPerMethod(final Class<?> clazz) throws Exception {
LoggingListener loggingListener = new LoggingListener();
Description suiteDescription = Description.createSuiteDescription(clazz);
Logger xyzLogger = LogManager.getLogger("xyz");
Logger abcLogger = LogManager.getLogger("abc");
final Level level = LogManager.getRootLogger().getLevel();
assertThat(xyzLogger.getLevel(), equalTo(level));
assertThat(abcLogger.getLevel(), equalTo(level));
loggingListener.testRunStarted(suiteDescription);
assertThat(xyzLogger.getLevel(), equalTo(level));
assertThat(abcLogger.getLevel(), equalTo(level));
Method method = clazz.getMethod("annotatedTestMethod");
TestLogging testLogging = method.getAnnotation(TestLogging.class);
TestIssueLogging testIssueLogging = method.getAnnotation(TestIssueLogging.class);
Annotation[] annotations = Stream.of(testLogging, testIssueLogging).filter(Objects::nonNull).toArray(Annotation[]::new);
Description testDescription = Description.createTestDescription(LoggingListenerTests.class, "annotatedTestMethod", annotations);
loggingListener.testStarted(testDescription);
assertThat(xyzLogger.getLevel(), equalTo(Level.TRACE));
assertThat(abcLogger.getLevel(), equalTo(level));
loggingListener.testFinished(testDescription);
assertThat(xyzLogger.getLevel(), equalTo(level));
assertThat(abcLogger.getLevel(), equalTo(level));
loggingListener.testRunFinished(new Result());
assertThat(xyzLogger.getLevel(), equalTo(level));
assertThat(abcLogger.getLevel(), equalTo(level));
}
public void testCustomLevelPerClass() throws Exception {
runTestCustomLevelPerClass(AnnotatedTestClass.class);
}
public void testIssueCustomLevelPerClass() throws Exception {
runTestCustomLevelPerClass(AnnotatedTestIssueClass.class);
}
public void testCustomLevelPerClassMixed() throws Exception {
runTestCustomLevelPerClass(AnnotatedTestMixedClass.class);
}
private void runTestCustomLevelPerClass(final Class<?> clazz) throws Exception {
LoggingListener loggingListener = new LoggingListener();
Description suiteDescription = Description.createSuiteDescription(clazz);
Logger abcLogger = LogManager.getLogger("abc");
Logger xyzLogger = LogManager.getLogger("xyz");
/*
* We include foo and foo.bar to maintain that logging levels are applied from the top of the hierarchy down. This ensures that
* setting the logging level for a parent logger and a child logger applies the parent level first and then the child as otherwise
* setting the parent level would overwrite the child level.
*/
Logger fooLogger = LogManager.getLogger("foo");
Logger fooBarLogger = LogManager.getLogger("foo.bar");
final Level level = LogManager.getRootLogger().getLevel();
assertThat(xyzLogger.getLevel(), equalTo(level));
assertThat(abcLogger.getLevel(), equalTo(level));
assertThat(fooLogger.getLevel(), equalTo(level));
assertThat(fooBarLogger.getLevel(), equalTo(level));
loggingListener.testRunStarted(suiteDescription);
assertThat(xyzLogger.getLevel(), equalTo(level));
assertThat(abcLogger.getLevel(), equalTo(Level.WARN));
assertThat(fooLogger.getLevel(), equalTo(Level.WARN));
assertThat(fooBarLogger.getLevel(), equalTo(Level.ERROR));
Description testDescription = Description.createTestDescription(LoggingListenerTests.class, "test");
loggingListener.testStarted(testDescription);
assertThat(xyzLogger.getLevel(), equalTo(level));
assertThat(abcLogger.getLevel(), equalTo(Level.WARN));
assertThat(fooLogger.getLevel(), equalTo(Level.WARN));
assertThat(fooBarLogger.getLevel(), equalTo(Level.ERROR));
loggingListener.testFinished(testDescription);
assertThat(xyzLogger.getLevel(), equalTo(level));
assertThat(abcLogger.getLevel(), equalTo(Level.WARN));
assertThat(fooLogger.getLevel(), equalTo(Level.WARN));
assertThat(fooBarLogger.getLevel(), equalTo(Level.ERROR));
loggingListener.testRunFinished(new Result());
assertThat(xyzLogger.getLevel(), equalTo(level));
assertThat(abcLogger.getLevel(), equalTo(level));
assertThat(fooLogger.getLevel(), equalTo(level));
assertThat(fooBarLogger.getLevel(), equalTo(level));
}
public void testCustomLevelPerClassAndPerMethod() throws Exception {
runTestCustomLevelPerClassAndPerMethod(AnnotatedTestClass.class);
}
public void testIssueCustomLevelPerClassAndPerMethod() throws Exception {
runTestCustomLevelPerClassAndPerMethod(AnnotatedTestIssueClass.class);
}
public void testCustomLevelPerClassAndPerMethodMixed() throws Exception {
runTestCustomLevelPerClassAndPerMethod(AnnotatedTestMixedClass.class);
}
private void runTestCustomLevelPerClassAndPerMethod(final Class<?> clazz) throws Exception {
LoggingListener loggingListener = new LoggingListener();
Description suiteDescription = Description.createSuiteDescription(clazz);
Logger abcLogger = LogManager.getLogger("abc");
Logger xyzLogger = LogManager.getLogger("xyz");
final Level level = LogManager.getRootLogger().getLevel();
assertThat(xyzLogger.getLevel(), equalTo(level));
assertThat(abcLogger.getLevel(), equalTo(level));
loggingListener.testRunStarted(suiteDescription);
assertThat(xyzLogger.getLevel(), equalTo(level));
assertThat(abcLogger.getLevel(), equalTo(Level.WARN));
Method method = TestClass.class.getMethod("annotatedTestMethod");
TestLogging testLogging = method.getAnnotation(TestLogging.class);
TestIssueLogging testIssueLogging = method.getAnnotation(TestIssueLogging.class);
Annotation[] annotations = Stream.of(testLogging, testIssueLogging).filter(Objects::nonNull).toArray(Annotation[]::new);
Description testDescription = Description.createTestDescription(LoggingListenerTests.class, "annotatedTestMethod", annotations);
loggingListener.testStarted(testDescription);
assertThat(xyzLogger.getLevel(), equalTo(Level.TRACE));
assertThat(abcLogger.getLevel(), equalTo(Level.WARN));
loggingListener.testFinished(testDescription);
assertThat(xyzLogger.getLevel(), equalTo(level));
assertThat(abcLogger.getLevel(), equalTo(Level.WARN));
Method method2 = TestClass.class.getMethod("annotatedTestMethod2");
TestLogging testLogging2 = method2.getAnnotation(TestLogging.class);
TestIssueLogging testIssueLogging2 = method2.getAnnotation(TestIssueLogging.class);
Annotation[] annotations2 = Stream.of(testLogging2, testIssueLogging2).filter(Objects::nonNull).toArray(Annotation[]::new);
Description testDescription2 = Description.createTestDescription(LoggingListenerTests.class, "annotatedTestMethod2", annotations2);
loggingListener.testStarted(testDescription2);
assertThat(xyzLogger.getLevel(), equalTo(Level.DEBUG));
assertThat(abcLogger.getLevel(), equalTo(Level.TRACE));
loggingListener.testFinished(testDescription2);
assertThat(xyzLogger.getLevel(), equalTo(level));
assertThat(abcLogger.getLevel(), equalTo(Level.WARN));
loggingListener.testRunFinished(new Result());
assertThat(xyzLogger.getLevel(), equalTo(level));
assertThat(abcLogger.getLevel(), equalTo(level));
}
public void testInvalidClassTestLoggingAnnotation() throws Exception {
runTestInvalidClassTestLoggingAnnotation(InvalidClass.class);
}
public void testInvalidClassTestIssueLoggingAnnotation() throws Exception {
runTestInvalidClassTestLoggingAnnotation(InvalidIssueClass.class);
}
private void runTestInvalidClassTestLoggingAnnotation(final Class<?> clazz) {
final LoggingListener loggingListener = new LoggingListener();
final Description suiteDescription = Description.createSuiteDescription(clazz);
final IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> loggingListener.testRunStarted(suiteDescription)
);
assertThat(e.getMessage(), equalTo("invalid test logging annotation [abc]"));
}
public void testInvalidMethodTestLoggingAnnotation() throws Exception {
runTestInvalidMethodTestLoggingAnnotation(InvalidTestLoggingMethod.class);
}
public void testInvalidMethodTestIssueLoggingAnnotation() throws Exception {
runTestInvalidMethodTestLoggingAnnotation(InvalidTestIssueLoggingMethod.class);
}
private void runTestInvalidMethodTestLoggingAnnotation(final Class<?> clazz) throws Exception {
final LoggingListener loggingListener = new LoggingListener();
final Description suiteDescription = Description.createSuiteDescription(clazz);
loggingListener.testRunStarted(suiteDescription);
final Method method = clazz.getMethod("invalidMethod");
final TestLogging testLogging = method.getAnnotation(TestLogging.class);
final TestIssueLogging testIssueLogging = method.getAnnotation(TestIssueLogging.class);
final Annotation[] annotations = Stream.of(testLogging, testIssueLogging).filter(Objects::nonNull).toArray(Annotation[]::new);
Description testDescription = Description.createTestDescription(clazz, "invalidMethod", annotations);
final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> loggingListener.testStarted(testDescription));
assertThat(e.getMessage(), equalTo("invalid test logging annotation [abc:INFO:WARN]"));
}
public void testDuplicateLoggerBetweenTestLoggingAndTestIssueLogging() throws Exception {
final LoggingListener loggingListener = new LoggingListener();
final Description suiteDescription = Description.createSuiteDescription(DuplicateLoggerBetweenTestLoggingAndTestIssueLogging.class);
final IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> loggingListener.testRunStarted(suiteDescription)
);
assertThat(e, hasToString(containsString("found intersection [abc] between TestLogging and TestIssueLogging")));
}
/**
* Dummy | LoggingListenerTests |
java | elastic__elasticsearch | qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/RareTermsIT.java | {
"start": 971,
"end": 3259
} | class ____ extends ESRestTestCase {
private static final String index = "idx";
private int indexDocs(int numDocs, int id) throws Exception {
final Request request = new Request("POST", "/_bulk");
final StringBuilder builder = new StringBuilder();
for (int i = 0; i < numDocs; ++i) {
Object[] args = new Object[] { index, id++, i };
builder.append(Strings.format("""
{ "index" : { "_index" : "%s", "_id": "%s" } }
{"str_value" : "s%s"}
""", args));
}
request.setJsonEntity(builder.toString());
assertOK(client().performRequest(request));
return id;
}
public void testSingleValuedString() throws Exception {
final Settings.Builder settings = indexSettings(2, 0);
createIndex(index, settings.build());
// We want to trigger the usage oif cuckoo filters that happen only when there are
// more than 10k distinct values in one shard.
final int numDocs = randomIntBetween(12000, 17000);
int id = 1;
// Index every value 5 times
for (int i = 0; i < 5; i++) {
id = indexDocs(numDocs, id);
refreshAllIndices();
}
// There are no rare terms that only appear in one document
assertNumRareTerms(1, 0);
// All terms have a cardinality lower than 10
assertNumRareTerms(10, numDocs);
}
private void assertNumRareTerms(int maxDocs, int rareTerms) throws IOException {
final Request request = new Request("POST", index + "/_search");
request.setJsonEntity(Strings.format("""
{
"aggs": {
"rareTerms": {
"rare_terms": {
"field": "str_value.keyword",
"max_doc_count": %s
}
}
}
}""", maxDocs));
final Response response = client().performRequest(request);
assertOK(response);
final Object o = XContentMapValues.extractValue("aggregations.rareTerms.buckets", responseAsMap(response));
assertThat(o, Matchers.instanceOf(List.class));
assertThat(((List<?>) o).size(), Matchers.equalTo(rareTerms));
}
}
| RareTermsIT |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/PreconditionsInvalidPlaceholderTest.java | {
"start": 1516,
"end": 2534
} | class ____ {
int foo;
public void checkPositive(int x) {
// BUG: Diagnostic contains: %s > 0
checkArgument(x > 0, "%d > 0", x);
}
public void checkFoo() {
// BUG: Diagnostic contains: foo must be equal to 0 but was %s
Preconditions.checkState(foo == 0, "foo must be equal to 0 but was {0}", foo);
}
public void verifyFoo(int x) {
// BUG: Diagnostic contains:
Verify.verify(x > 0, "%d > 0", x);
}
}\
""")
.doTest();
}
@Test
public void negativeCase1() {
compilationHelper
.addSourceLines(
"PreconditionsInvalidPlaceholderNegativeCase1.java",
"""
package com.google.errorprone.bugpatterns.testdata;
import static com.google.common.base.Preconditions.checkArgument;
import com.google.common.base.Preconditions;
public | PreconditionsInvalidPlaceholderPositiveCase1 |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServices.java | {
"start": 2776,
"end": 3304
} | class ____ extends JerseyTestBase {
private static final Configuration CONF = new Configuration();
private static MockAppContext appContext;
@Override
protected Application configure() {
ResourceConfig config = new ResourceConfig();
config.register(new JerseyBinder());
config.register(AMWebServices.class);
config.register(GenericExceptionHandler.class);
config.register(new JettisonFeature());
config.register(JAXBContextResolver.class);
return config;
}
private static | TestAMWebServices |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/cfg/DatatypeFeature.java | {
"start": 189,
"end": 393
} | interface ____ extends JacksonFeature
{
/**
* Internal index used for efficient storage and index; no
* user serviceable contents inside!
*/
public int featureIndex();
}
| DatatypeFeature |
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/custom/response/CompletionResponseParserTests.java | {
"start": 1333,
"end": 11155
} | class ____ extends AbstractBWCWireSerializationTestCase<CompletionResponseParser> {
public static CompletionResponseParser createRandom() {
return new CompletionResponseParser("$." + randomAlphaOfLength(5));
}
public void testFromMap() {
var validation = new ValidationException();
var parser = CompletionResponseParser.fromMap(
new HashMap<>(Map.of(COMPLETION_PARSER_RESULT, "$.result[*].text")),
"scope",
validation
);
assertThat(parser, is(new CompletionResponseParser("$.result[*].text")));
}
public void testFromMap_ThrowsException_WhenRequiredFieldIsNotPresent() {
var validation = new ValidationException();
var exception = expectThrows(
ValidationException.class,
() -> CompletionResponseParser.fromMap(new HashMap<>(Map.of("some_field", "$.result[*].text")), "scope", validation)
);
assertThat(
exception.getMessage(),
is("Validation Failed: 1: [scope.json_parser] does not contain the required setting [completion_result];")
);
}
public void testToXContent() throws IOException {
var entity = new CompletionResponseParser("$.result[*].text");
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
{
builder.startObject();
entity.toXContent(builder, null);
builder.endObject();
}
String xContentResult = Strings.toString(builder);
var expected = XContentHelper.stripWhitespace("""
{
"json_parser": {
"completion_result": "$.result[*].text"
}
}
""");
assertThat(xContentResult, is(expected));
}
public void testParse() throws IOException {
String responseJson = """
{
"request_id": "450fcb80-f796-****-8d69-e1e86d29aa9f",
"latency": 564.903929,
"result": [
{
"text":"completion results"
}
],
"usage": {
"output_tokens": 6320,
"input_tokens": 35,
"total_tokens": 6355
}
}
""";
var parser = new CompletionResponseParser("$.result[*].text");
ChatCompletionResults parsedResults = (ChatCompletionResults) parser.parse(
new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8))
);
assertThat(parsedResults, is(new ChatCompletionResults(List.of(new ChatCompletionResults.Result("completion results")))));
}
public void testParse_String() throws IOException {
String responseJson = """
{
"request_id": "450fcb80-f796-****-8d69-e1e86d29aa9f",
"latency": 564.903929,
"result": {
"text":"completion results"
},
"usage": {
"output_tokens": 6320,
"input_tokens": 35,
"total_tokens": 6355
}
}
""";
var parser = new CompletionResponseParser("$.result.text");
ChatCompletionResults parsedResults = (ChatCompletionResults) parser.parse(
new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8))
);
assertThat(parsedResults, is(new ChatCompletionResults(List.of(new ChatCompletionResults.Result("completion results")))));
}
public void testParse_MultipleResults() throws IOException {
String responseJson = """
{
"request_id": "450fcb80-f796-****-8d69-e1e86d29aa9f",
"latency": 564.903929,
"result": [
{
"text":"completion results"
},
{
"text":"completion results2"
}
],
"usage": {
"output_tokens": 6320,
"input_tokens": 35,
"total_tokens": 6355
}
}
""";
var parser = new CompletionResponseParser("$.result[*].text");
ChatCompletionResults parsedResults = (ChatCompletionResults) parser.parse(
new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8))
);
assertThat(
parsedResults,
is(
new ChatCompletionResults(
List.of(new ChatCompletionResults.Result("completion results"), new ChatCompletionResults.Result("completion results2"))
)
)
);
}
public void testParse_AnthropicFormat() throws IOException {
String responseJson = """
{
"id": "msg_01XzZQmG41BMGe5NZ5p2vEWb",
"type": "message",
"role": "assistant",
"model": "claude-3-opus-20240229",
"content": [
{
"type": "text",
"text": "result"
},
{
"type": "text",
"text": "result2"
}
],
"stop_reason": "end_turn",
"stop_sequence": null,
"usage": {
"input_tokens": 16,
"output_tokens": 326
}
}
""";
var parser = new CompletionResponseParser("$.content[*].text");
ChatCompletionResults parsedResults = (ChatCompletionResults) parser.parse(
new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8))
);
assertThat(
parsedResults,
is(new ChatCompletionResults(List.of(new ChatCompletionResults.Result("result"), new ChatCompletionResults.Result("result2"))))
);
}
public void testParse_ThrowsException_WhenExtractedField_IsNotAList() {
String responseJson = """
{
"request_id": "450fcb80-f796-****-8d69-e1e86d29aa9f",
"latency": 564.903929,
"result": "invalid_field",
"usage": {
"output_tokens": 6320,
"input_tokens": 35,
"total_tokens": 6355
}
}
""";
var parser = new CompletionResponseParser("$.result[*].text");
var exception = expectThrows(
IllegalArgumentException.class,
() -> parser.parse(new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)))
);
assertThat(
exception.getMessage(),
is(
"Current path [[*].text] matched the array field pattern "
+ "but the current object is not a list, found invalid type [String] instead."
)
);
}
public void testParse_ThrowsException_WhenExtractedField_IsNotListOfStrings() {
String responseJson = """
{
"request_id": "450fcb80-f796-****-8d69-e1e86d29aa9f",
"latency": 564.903929,
"result": ["string", true],
"usage": {
"output_tokens": 6320,
"input_tokens": 35,
"total_tokens": 6355
}
}
""";
var parser = new CompletionResponseParser("$.result");
var exception = expectThrows(
IllegalStateException.class,
() -> parser.parse(new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)))
);
assertThat(
exception.getMessage(),
is("Failed to parse list entry [1], error: Unable to convert field [$.result] of type [Boolean] to [String]")
);
}
public void testParse_ThrowsException_WhenExtractedField_IsNotAListOrString() {
String responseJson = """
{
"request_id": "450fcb80-f796-****-8d69-e1e86d29aa9f",
"latency": 564.903929,
"result": 123,
"usage": {
"output_tokens": 6320,
"input_tokens": 35,
"total_tokens": 6355
}
}
""";
var parser = new CompletionResponseParser("$.result");
var exception = expectThrows(
IllegalArgumentException.class,
() -> parser.parse(new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)))
);
assertThat(
exception.getMessage(),
is("Extracted field [result] from path [$.result] is an invalid type, expected a list or a string but received [Integer]")
);
}
@Override
protected CompletionResponseParser mutateInstanceForVersion(CompletionResponseParser instance, TransportVersion version) {
return instance;
}
@Override
protected Writeable.Reader<CompletionResponseParser> instanceReader() {
return CompletionResponseParser::new;
}
@Override
protected CompletionResponseParser createTestInstance() {
return createRandom();
}
@Override
protected CompletionResponseParser mutateInstance(CompletionResponseParser instance) throws IOException {
String completionResultPath = randomValueOtherThan(instance.getCompletionResultPath(), () -> "$." + randomAlphaOfLength(5));
return new CompletionResponseParser(completionResultPath);
}
}
| CompletionResponseParserTests |
java | apache__camel | components/camel-keycloak/src/main/java/org/apache/camel/component/keycloak/KeycloakConstants.java | {
"start": 901,
"end": 8624
} | class ____ {
@Metadata(description = "The operation to perform", javaType = "org.apache.camel.component.keycloak.KeycloakOperations")
public static final String OPERATION = "CamelKeycloakOperation";
@Metadata(description = "The realm name", javaType = "String")
public static final String REALM_NAME = "CamelKeycloakRealmName";
@Metadata(description = "The user ID", javaType = "String")
public static final String USER_ID = "CamelKeycloakUserId";
@Metadata(description = "The username", javaType = "String")
public static final String USERNAME = "CamelKeycloakUsername";
@Metadata(description = "The user email", javaType = "String")
public static final String USER_EMAIL = "CamelKeycloakUserEmail";
@Metadata(description = "The user first name", javaType = "String")
public static final String USER_FIRST_NAME = "CamelKeycloakUserFirstName";
@Metadata(description = "The user last name", javaType = "String")
public static final String USER_LAST_NAME = "CamelKeycloakUserLastName";
@Metadata(description = "The role ID", javaType = "String")
public static final String ROLE_ID = "CamelKeycloakRoleId";
@Metadata(description = "The role name", javaType = "String")
public static final String ROLE_NAME = "CamelKeycloakRoleName";
@Metadata(description = "The role description", javaType = "String")
public static final String ROLE_DESCRIPTION = "CamelKeycloakRoleDescription";
@Metadata(description = "The group ID", javaType = "String")
public static final String GROUP_ID = "CamelKeycloakGroupId";
@Metadata(description = "The group name", javaType = "String")
public static final String GROUP_NAME = "CamelKeycloakGroupName";
@Metadata(description = "The client ID", javaType = "String")
public static final String CLIENT_ID = "CamelKeycloakClientId";
@Metadata(description = "The client UUID", javaType = "String")
public static final String CLIENT_UUID = "CamelKeycloakClientUuid";
@Metadata(description = "The user password", javaType = "String")
public static final String USER_PASSWORD = "CamelKeycloakUserPassword";
@Metadata(description = "Whether the password is temporary", javaType = "Boolean")
public static final String PASSWORD_TEMPORARY = "CamelKeycloakPasswordTemporary";
@Metadata(description = "Search query string", javaType = "String")
public static final String SEARCH_QUERY = "CamelKeycloakSearchQuery";
@Metadata(description = "Maximum number of results", javaType = "Integer")
public static final String MAX_RESULTS = "CamelKeycloakMaxResults";
@Metadata(description = "First result index", javaType = "Integer")
public static final String FIRST_RESULT = "CamelKeycloakFirstResult";
@Metadata(description = "The client scope ID", javaType = "String")
public static final String CLIENT_SCOPE_ID = "CamelKeycloakClientScopeId";
@Metadata(description = "The client scope name", javaType = "String")
public static final String CLIENT_SCOPE_NAME = "CamelKeycloakClientScopeName";
@Metadata(description = "The event type (event or admin-event)", javaType = "String")
public static final String EVENT_TYPE = "CamelKeycloakEventType";
@Metadata(description = "The event ID or timestamp", javaType = "Long")
public static final String EVENT_ID = "CamelKeycloakEventId";
// Identity Provider constants
@Metadata(description = "The identity provider alias", javaType = "String")
public static final String IDP_ALIAS = "CamelKeycloakIdpAlias";
@Metadata(description = "The identity provider ID", javaType = "String")
public static final String IDP_ID = "CamelKeycloakIdpId";
// Authorization Services constants
@Metadata(description = "The resource ID", javaType = "String")
public static final String RESOURCE_ID = "CamelKeycloakResourceId";
@Metadata(description = "The resource name", javaType = "String")
public static final String RESOURCE_NAME = "CamelKeycloakResourceName";
@Metadata(description = "The resource type", javaType = "String")
public static final String RESOURCE_TYPE = "CamelKeycloakResourceType";
@Metadata(description = "The resource URI", javaType = "String")
public static final String RESOURCE_URI = "CamelKeycloakResourceUri";
@Metadata(description = "The policy ID", javaType = "String")
public static final String POLICY_ID = "CamelKeycloakPolicyId";
@Metadata(description = "The policy name", javaType = "String")
public static final String POLICY_NAME = "CamelKeycloakPolicyName";
@Metadata(description = "The policy type", javaType = "String")
public static final String POLICY_TYPE = "CamelKeycloakPolicyType";
@Metadata(description = "The permission ID", javaType = "String")
public static final String PERMISSION_ID = "CamelKeycloakPermissionId";
@Metadata(description = "The permission name", javaType = "String")
public static final String PERMISSION_NAME = "CamelKeycloakPermissionName";
@Metadata(description = "The scope name", javaType = "String")
public static final String SCOPE_NAME = "CamelKeycloakScopeName";
// User Attribute constants
@Metadata(description = "The user attribute name", javaType = "String")
public static final String ATTRIBUTE_NAME = "CamelKeycloakAttributeName";
@Metadata(description = "The user attribute value", javaType = "String")
public static final String ATTRIBUTE_VALUE = "CamelKeycloakAttributeValue";
// User Credential constants
@Metadata(description = "The credential ID", javaType = "String")
public static final String CREDENTIAL_ID = "CamelKeycloakCredentialId";
@Metadata(description = "The credential type", javaType = "String")
public static final String CREDENTIAL_TYPE = "CamelKeycloakCredentialType";
// User Action constants
@Metadata(description = "The required action type", javaType = "String")
public static final String REQUIRED_ACTION = "CamelKeycloakRequiredAction";
@Metadata(description = "The list of actions to execute", javaType = "java.util.List<String>")
public static final String ACTIONS = "CamelKeycloakActions";
@Metadata(description = "The redirect URI", javaType = "String")
public static final String REDIRECT_URI = "CamelKeycloakRedirectUri";
@Metadata(description = "The lifespan in seconds", javaType = "Integer")
public static final String LIFESPAN = "CamelKeycloakLifespan";
// Bulk operations constants
@Metadata(description = "The list of users for bulk operations",
javaType = "java.util.List<org.keycloak.representations.idm.UserRepresentation>")
public static final String USERS = "CamelKeycloakUsers";
@Metadata(description = "The list of user IDs for bulk operations", javaType = "java.util.List<String>")
public static final String USER_IDS = "CamelKeycloakUserIds";
@Metadata(description = "The list of usernames for bulk operations", javaType = "java.util.List<String>")
public static final String USERNAMES = "CamelKeycloakUsernames";
@Metadata(description = "The list of role names for bulk operations", javaType = "java.util.List<String>")
public static final String ROLE_NAMES = "CamelKeycloakRoleNames";
@Metadata(description = "Continue on error during bulk operations", javaType = "Boolean")
public static final String CONTINUE_ON_ERROR = "CamelKeycloakContinueOnError";
@Metadata(description = "Batch size for bulk operations", javaType = "Integer")
public static final String BATCH_SIZE = "CamelKeycloakBatchSize";
private KeycloakConstants() {
// Utility class
}
}
| KeycloakConstants |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/converted/converter/AttributeConverterDefinitionTest.java | {
"start": 2560,
"end": 2845
} | class ____ implements AttrConverterSameType<String> {
@Override
public String convertToDatabaseColumn(String attribute) {
return attribute;
}
@Override
public String convertToEntityAttribute(String dbData) {
return dbData;
}
}
public static | AttrConverterSameTypeImpl |
java | alibaba__nacos | core/src/main/java/com/alibaba/nacos/core/cluster/lookup/FileConfigMemberLookup.java | {
"start": 1337,
"end": 3020
} | class ____ extends AbstractMemberLookup {
private static final String DEFAULT_SEARCH_SEQ = "cluster.conf";
private FileWatcher watcher = new FileWatcher() {
@Override
public void onChange(FileChangeEvent event) {
readClusterConfFromDisk();
}
@Override
public boolean interest(String context) {
return StringUtils.contains(context, DEFAULT_SEARCH_SEQ);
}
};
@Override
public void doStart() throws NacosException {
readClusterConfFromDisk();
// Use the inotify mechanism to monitor file changes and automatically
// trigger the reading of cluster.conf
try {
WatchFileCenter.registerWatcher(EnvUtil.getConfPath(), watcher);
} catch (Throwable e) {
Loggers.CLUSTER.error("An exception occurred in the launch file monitor : {}", e.getMessage());
}
}
@Override
public boolean useAddressServer() {
return false;
}
@Override
protected void doDestroy() throws NacosException {
WatchFileCenter.deregisterWatcher(EnvUtil.getConfPath(), watcher);
}
private void readClusterConfFromDisk() {
Collection<Member> tmpMembers = new ArrayList<>();
try {
List<String> tmp = EnvUtil.readClusterConf();
tmpMembers = MemberUtil.readServerConf(tmp);
} catch (Throwable e) {
Loggers.CLUSTER
.error("nacos-XXXX [serverlist] failed to get serverlist from disk!, error : {}", e.getMessage());
}
afterLookup(tmpMembers);
}
}
| FileConfigMemberLookup |
java | quarkusio__quarkus | extensions/smallrye-jwt/deployment/src/test/java/io/quarkus/jwt/test/dev/SmallryeJwtPersistentDevModeEncryptedTest.java | {
"start": 625,
"end": 2121
} | class ____ {
@RegisterExtension
static QuarkusUnitTest unitTest = new QuarkusUnitTest()
.setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class)
.addClasses(GreetingResource.class, SmallryeJwtPersistentDevModeSignedTest.PersistentJwtChainBuilder.class))
.addBuildChainCustomizer(new SmallryeJwtPersistentDevModeSignedTest.PersistentJwtChainBuilder() {
@Override
public void accept(BuildChainBuilder chain) {
super.accept(chain);
chain.addBuildStep(new BuildStep() {
@Override
public void execute(BuildContext context) {
context.produce(new GenerateEncryptedDevModeJwtKeysBuildItem());
}
})
.produces(GenerateEncryptedDevModeJwtKeysBuildItem.class)
.build();
}
});
@Test
void canBeEncrypted() {
// make sure we can sign JWT tokens recognised by the server, since they use the same config
String token = Jwt.upn("jdoe@quarkus.io")
.groups("User")
.innerSign().encrypt();
RestAssured.given()
.header(new Header("Authorization", "Bearer " + token))
.get("/only-user")
.then().assertThat().statusCode(200);
}
}
| SmallryeJwtPersistentDevModeEncryptedTest |
java | micronaut-projects__micronaut-core | test-suite/src/test/java/io/micronaut/docs/aop/around_reactive/TxSpec.java | {
"start": 914,
"end": 4661
} | class ____ {
@RepeatedTest(10)
public void testReactiveTx() {
try (ApplicationContext applicationContext = ApplicationContext.run()) {
TxManager txManager = applicationContext.getBean(TxManager.class);
TxExample exampleBean = applicationContext.getBean(TxExample.class);
Assertions.assertTrue(txManager.getTransactionsLog().isEmpty());
String job = Mono.from(
exampleBean.doWorkMono("job1")
).block();
Assertions.assertEquals(List.of("OPEN TX1", "IN TX1", "COMMIT TX1"), txManager.getTransactionsLog());
Assertions.assertEquals("Doing job: job1 in transaction: TX1", job);
}
}
@RepeatedTest(10)
public void testTwoFluxReactiveTx() {
try (ApplicationContext applicationContext = ApplicationContext.run()) {
TxManager txManager = applicationContext.getBean(TxManager.class);
TxExample exampleBean = applicationContext.getBean(TxExample.class);
Assertions.assertTrue(txManager.getTransactionsLog().isEmpty());
List<String> results = new ArrayList<>();
Flux.from(
exampleBean.doWorkFlux("job1").doOnNext(results::add)
).thenMany(
Flux.from(
exampleBean.doWorkFlux("job2").doOnNext(results::add)
)
).collectList().block();
Assertions.assertEquals(List.of("OPEN TX1", "IN TX1", "COMMIT TX1", "OPEN TX2", "IN TX2", "COMMIT TX2"), txManager.getTransactionsLog());
Assertions.assertEquals(List.of("Doing job: job1 in transaction: TX1", "Doing job: job2 in transaction: TX2"), results);
}
}
@RepeatedTest(10)
public void testTwoMonoReactiveTx() {
try (ApplicationContext applicationContext = ApplicationContext.run()) {
TxManager txManager = applicationContext.getBean(TxManager.class);
TxExample exampleBean = applicationContext.getBean(TxExample.class);
Assertions.assertTrue(txManager.getTransactionsLog().isEmpty());
List<String> results = new ArrayList<>();
Mono.from(
exampleBean.doWorkMono("job1").doOnNext(results::add)
).flatMap(result ->
Mono.from(
exampleBean.doWorkMono("job2").doOnNext(results::add)
)
).block();
Assertions.assertEquals(List.of("OPEN TX1", "IN TX1", "COMMIT TX1", "OPEN TX2", "IN TX2", "COMMIT TX2"), txManager.getTransactionsLog());
Assertions.assertEquals(List.of("Doing job: job1 in transaction: TX1", "Doing job: job2 in transaction: TX2"), results);
}
}
@RepeatedTest(10)
public void testTwoMonoReactiveTx2() {
try (ApplicationContext applicationContext = ApplicationContext.run()) {
TxManager txManager = applicationContext.getBean(TxManager.class);
TxExample exampleBean = applicationContext.getBean(TxExample.class);
Assertions.assertTrue(txManager.getTransactionsLog().isEmpty());
List<String> results = new ArrayList<>();
Flux.from(
exampleBean.doWorkMono("job1").doOnNext(results::add)
).then(
Mono.from(
exampleBean.doWorkMono("job2").doOnNext(results::add)
)
).block();
Assertions.assertEquals(List.of("OPEN TX1", "IN TX1", "COMMIT TX1", "OPEN TX2", "IN TX2", "COMMIT TX2"), txManager.getTransactionsLog());
Assertions.assertEquals(List.of("Doing job: job1 in transaction: TX1", "Doing job: job2 in transaction: TX2"), results);
}
}
// end::test[]
}
| TxSpec |
java | apache__hadoop | hadoop-cloud-storage-project/hadoop-cos/src/main/java/org/apache/hadoop/fs/cosn/FileMetadata.java | {
"start": 1124,
"end": 1946
} | class ____ {
private final String key;
private final long length;
private final long lastModified;
private final boolean isFile;
FileMetadata(String key, long length, long lastModified) {
this(key, length, lastModified, true);
}
FileMetadata(String key, long length, long lastModified, boolean isFile) {
this.key = key;
this.length = length;
this.lastModified = lastModified;
this.isFile = isFile;
}
public String getKey() {
return key;
}
public long getLength() {
return length;
}
public long getLastModified() {
return lastModified;
}
@Override
public String toString() {
return "FileMetadata[" + key + ", " + length + ", " + lastModified + ", "
+ "file?" + isFile + "]";
}
public boolean isFile() {
return isFile;
}
}
| FileMetadata |
java | netty__netty | codec-http2/src/test/java/io/netty/handler/codec/http2/DefaultHttp2RemoteFlowControllerTest.java | {
"start": 43680,
"end": 47125
} | class ____ implements Http2RemoteFlowController.FlowControlled {
private int currentPadding;
private int currentPayloadSize;
private int originalPayloadSize;
private int originalPadding;
private boolean writeCalled;
private final boolean mergeable;
private boolean merged;
private Throwable t;
private FakeFlowControlled(int size) {
this(size, false);
}
private FakeFlowControlled(int size, boolean mergeable) {
this(size, 0, mergeable);
}
private FakeFlowControlled(int payloadSize, int padding, boolean mergeable) {
currentPayloadSize = originalPayloadSize = payloadSize;
currentPadding = originalPadding = padding;
this.mergeable = mergeable;
}
@Override
public int size() {
return currentPayloadSize + currentPadding;
}
private int originalSize() {
return originalPayloadSize + originalPadding;
}
@Override
public void error(ChannelHandlerContext ctx, Throwable t) {
this.t = t;
}
@Override
public void writeComplete() {
}
@Override
public void write(ChannelHandlerContext ctx, int allowedBytes) {
if (allowedBytes <= 0 && size() != 0) {
// Write has been called but no data can be written
return;
}
writeCalled = true;
int written = Math.min(size(), allowedBytes);
if (written > currentPayloadSize) {
written -= currentPayloadSize;
currentPayloadSize = 0;
currentPadding -= written;
} else {
currentPayloadSize -= written;
}
}
@Override
public boolean merge(ChannelHandlerContext ctx, Http2RemoteFlowController.FlowControlled next) {
if (mergeable && next instanceof FakeFlowControlled) {
FakeFlowControlled ffcNext = (FakeFlowControlled) next;
originalPayloadSize += ffcNext.originalPayloadSize;
currentPayloadSize += ffcNext.originalPayloadSize;
currentPadding = originalPadding = Math.max(originalPadding, ffcNext.originalPadding);
ffcNext.merged = true;
return true;
}
return false;
}
public int written() {
return originalSize() - size();
}
public void assertNotWritten() {
assertFalse(writeCalled);
}
public void assertPartiallyWritten(int expectedWritten) {
assertPartiallyWritten(expectedWritten, 0);
}
public void assertPartiallyWritten(int expectedWritten, int delta) {
assertTrue(writeCalled);
assertEquals(expectedWritten, written(), delta);
}
public void assertFullyWritten() {
assertTrue(writeCalled);
assertEquals(0, currentPayloadSize);
assertEquals(0, currentPadding);
}
public boolean assertMerged() {
return merged;
}
public void assertError(Http2Error error) {
assertNotNull(t);
if (error != null) {
assertSame(error, ((Http2Exception) t).error());
}
}
}
}
| FakeFlowControlled |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationMetadata.java | {
"start": 9042,
"end": 12394
} | class ____ implements Writeable, ToXContentFragment {
public static final String MISSING_VALUE_MARKER = "_absent_";
private final String nodeId;
private final String nodeName;
public VotingConfigExclusion(DiscoveryNode node) {
this(node.getId(), node.getName());
}
public VotingConfigExclusion(StreamInput in) throws IOException {
this.nodeId = in.readString();
this.nodeName = in.readString();
}
public VotingConfigExclusion(String nodeId, String nodeName) {
this.nodeId = nodeId;
this.nodeName = nodeName;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(nodeId);
out.writeString(nodeName);
}
public String getNodeId() {
return nodeId;
}
public String getNodeName() {
return nodeName;
}
private static final ParseField NODE_ID_PARSE_FIELD = new ParseField("node_id");
private static final ParseField NODE_NAME_PARSE_FIELD = new ParseField("node_name");
private static String nodeId(Object[] nodeIdAndName) {
return (String) nodeIdAndName[0];
}
private static String nodeName(Object[] nodeIdAndName) {
return (String) nodeIdAndName[1];
}
private static final ConstructingObjectParser<VotingConfigExclusion, Void> PARSER = new ConstructingObjectParser<>(
"voting_config_exclusion",
nodeIdAndName -> new VotingConfigExclusion(nodeId(nodeIdAndName), nodeName(nodeIdAndName))
);
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), NODE_ID_PARSE_FIELD);
PARSER.declareString(ConstructingObjectParser.constructorArg(), NODE_NAME_PARSE_FIELD);
}
public static VotingConfigExclusion fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return builder.startObject()
.field(NODE_ID_PARSE_FIELD.getPreferredName(), nodeId)
.field(NODE_NAME_PARSE_FIELD.getPreferredName(), nodeName)
.endObject();
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
VotingConfigExclusion that = (VotingConfigExclusion) o;
return Objects.equals(nodeId, that.nodeId) && Objects.equals(nodeName, that.nodeName);
}
@Override
public int hashCode() {
return Objects.hash(nodeId, nodeName);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
if (nodeName.length() > 0) {
sb.append('{').append(nodeName).append('}');
}
sb.append('{').append(nodeId).append('}');
return sb.toString();
}
}
/**
* A collection of persistent node ids, denoting the voting configuration for cluster state changes.
*/
public static | VotingConfigExclusion |
java | quarkusio__quarkus | extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/devmode/DevModeInterceptor.java | {
"start": 170,
"end": 899
} | class ____ implements ServerInterceptor {
private final ClassLoader classLoader;
public DevModeInterceptor(ClassLoader contextClassLoader) {
classLoader = contextClassLoader;
}
@Override
public <ReqT, RespT> ServerCall.Listener<ReqT> interceptCall(ServerCall<ReqT, RespT> serverCall, Metadata metadata,
ServerCallHandler<ReqT, RespT> next) {
ClassLoader originalTccl = Thread.currentThread().getContextClassLoader();
Thread.currentThread().setContextClassLoader(classLoader);
try {
return next.startCall(serverCall, metadata);
} finally {
Thread.currentThread().setContextClassLoader(originalTccl);
}
}
}
| DevModeInterceptor |
java | netty__netty | codec-socks/src/test/java/io/netty/handler/codec/socks/SocksAuthRequestDecoderTest.java | {
"start": 1014,
"end": 2681
} | class ____ {
private static final String username = "testUserName";
private static final String password = "testPassword";
@Test
public void testAuthRequestDecoder() {
SocksAuthRequest msg = new SocksAuthRequest(username, password);
SocksAuthRequestDecoder decoder = new SocksAuthRequestDecoder();
EmbeddedChannel embedder = new EmbeddedChannel(decoder);
SocksCommonTestUtils.writeMessageIntoEmbedder(embedder, msg);
msg = embedder.readInbound();
assertEquals(username, msg.username());
assertEquals(password, msg.password());
assertNull(embedder.readInbound());
}
@Test
public void testAuthRequestDecoderPartialSend() {
EmbeddedChannel ch = new EmbeddedChannel(new SocksAuthRequestDecoder());
ByteBuf byteBuf = Unpooled.buffer(16);
// Send username and password size
byteBuf.writeByte(SocksSubnegotiationVersion.AUTH_PASSWORD.byteValue());
byteBuf.writeByte(username.length());
byteBuf.writeBytes(username.getBytes());
byteBuf.writeByte(password.length());
ch.writeInbound(byteBuf);
// Check that channel is empty
assertNull(ch.readInbound());
// Send password
ByteBuf byteBuf2 = Unpooled.buffer();
byteBuf2.writeBytes(password.getBytes());
ch.writeInbound(byteBuf2);
// Read message from channel
SocksAuthRequest msg = ch.readInbound();
// Check message
assertEquals(username, msg.username());
assertEquals(password, msg.password());
assertFalse(ch.finishAndReleaseAll());
}
}
| SocksAuthRequestDecoderTest |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/util/subpackage/Component.java | {
"start": 1046,
"end": 1877
} | class ____ {
private Integer number;
private String text;
public Integer getNumber() {
return this.number;
}
public String getText() {
return this.text;
}
@Autowired
protected void configure(Integer number, String text) {
this.number = number;
this.text = text;
}
@PostConstruct
protected void init() {
Assert.state(number != null, "number must not be null");
Assert.state(StringUtils.hasText(text), "text must not be empty");
}
@PreDestroy
protected void destroy() {
this.number = null;
this.text = null;
}
int subtract(int a, int b) {
return a - b;
}
int add(int... args) {
int sum = 0;
for (int arg : args) {
sum += arg;
}
return sum;
}
int multiply(Integer... args) {
int product = 1;
for (Integer arg : args) {
product *= arg;
}
return product;
}
}
| Component |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/QueueCapacityVector.java | {
"start": 8272,
"end": 8553
} | enum ____ {
PERCENTAGE("%"), ABSOLUTE(""), WEIGHT("w");
private final String postfix;
ResourceUnitCapacityType(String postfix) {
this.postfix = postfix;
}
public String getPostfix() {
return postfix;
}
}
public static | ResourceUnitCapacityType |
java | dropwizard__dropwizard | dropwizard-example/src/main/java/com/example/helloworld/filter/DateNotSpecifiedFilter.java | {
"start": 354,
"end": 812
} | class ____ implements ContainerRequestFilter {
@Override
public void filter(ContainerRequestContext requestContext) throws IOException {
final String dateHeader = requestContext.getHeaderString(HttpHeaders.DATE);
if (dateHeader == null) {
throw new WebApplicationException(new IllegalArgumentException("Date Header was not specified"),
Response.Status.BAD_REQUEST);
}
}
}
| DateNotSpecifiedFilter |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inject/MoreThanOneQualifierTest.java | {
"start": 2626,
"end": 3179
} | class ____ {
// BUG: Diagnostic contains: remove
@Bar1
// BUG: Diagnostic contains: remove
@Bar2
private int n;
// BUG: Diagnostic contains: remove
@Bar1
// BUG: Diagnostic contains: remove
@Bar2
public TestClass2() {}
// BUG: Diagnostic contains: remove
@Bar1
// BUG: Diagnostic contains: remove
@Bar2
public void setN(
// BUG: Diagnostic contains: remove
@Bar1
// BUG: Diagnostic contains: remove
@Bar2
int n) {}
}
/**
* A | TestClass2 |
java | alibaba__nacos | naming/src/test/java/com/alibaba/nacos/naming/core/v2/cleaner/EmptyServiceAutoCleanerV2Test.java | {
"start": 1581,
"end": 2957
} | class ____ {
@Mock
private ClientServiceIndexesManager clientServiceIndexesManager;
@Mock
private ServiceStorage serviceStorage;
private EmptyServiceAutoCleanerV2 emptyServiceAutoCleanerV2;
@Mock
private Service service;
@BeforeEach
void setUp() {
EnvUtil.setEnvironment(new MockEnvironment());
emptyServiceAutoCleanerV2 = new EmptyServiceAutoCleanerV2(clientServiceIndexesManager, serviceStorage);
Mockito.when(service.getNamespace()).thenReturn("public");
ServiceManager serviceManager = ServiceManager.getInstance();
serviceManager.getSingleton(service);
}
@AfterEach
void tearDown() {
ServiceManager.getInstance().removeSingleton(service);
}
@Test
void testGetType() {
assertEquals("emptyService", emptyServiceAutoCleanerV2.getType());
}
@Test
void testDoClean() {
try {
Mockito.when(clientServiceIndexesManager.getAllClientsRegisteredService(Mockito.any())).thenReturn(Collections.emptyList());
Mockito.when(service.getLastUpdatedTime()).thenReturn(0L);
emptyServiceAutoCleanerV2.doClean();
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
}
| EmptyServiceAutoCleanerV2Test |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/view/BaseViewTests.java | {
"start": 8869,
"end": 9649
} | class ____ extends AbstractView {
private final WebApplicationContext wac;
boolean initialized;
/** Captured model in render */
Map<String, Object> model;
TestView(WebApplicationContext wac) {
this.wac = wac;
}
@Override
protected void renderMergedOutputModel(Map<String, Object> model, HttpServletRequest request,
HttpServletResponse response) {
this.model = model;
}
/**
* @see org.springframework.context.support.ApplicationObjectSupport#initApplicationContext()
*/
@Override
protected void initApplicationContext() throws ApplicationContextException {
if (initialized) {
throw new RuntimeException("Already initialized");
}
this.initialized = true;
assertThat(getApplicationContext()).isSameAs(wac);
}
}
}
| TestView |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchRequest.java | {
"start": 1179,
"end": 1348
} | class ____ the data needed to create a watch along with the name of the watch.
* The name of the watch will become the ID of the indexed document.
*/
public final | contains |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/bean/BeanInvokeToStringMethodTest.java | {
"start": 989,
"end": 1586
} | class ____ extends ContextTestSupport {
@Test
public void testInvokeToString() throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived("MyFooBean");
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").bean(MyFooBean.class, "toString").to("mock:result");
}
};
}
}
| BeanInvokeToStringMethodTest |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/join/lookup/keyordered/Epoch.java | {
"start": 1448,
"end": 4684
} | class ____<OUT> {
/** The completed ones in this epoch which is not the active epoch. */
private final Deque<StreamElementQueueEntry<OUT>> outputQueue;
/** The watermark binding to the epoch. */
private final Watermark watermark;
/** The number of records that are still ongoing in this epoch. */
private int ongoingRecordCount;
@Nullable private Consumer<StreamElementQueueEntry<OUT>> output;
/** The action associated with non-record of this epoch(e.g. advance watermark). */
@Nullable private Runnable advanceWatermark;
private EpochStatus status;
public Epoch(Watermark watermark) {
this.status = EpochStatus.OPEN;
this.ongoingRecordCount = 0;
this.advanceWatermark = null;
this.output = null;
this.outputQueue = new LinkedList<>();
this.watermark = watermark;
}
/** Add resultFuture to the output queue. */
public void collect(StreamElementQueueEntry<OUT> resultFuture) {
outputQueue.add(resultFuture);
}
public void setOutput(Consumer<StreamElementQueueEntry<OUT>> outputConsumer) {
if (output == null) {
this.output = outputConsumer;
}
}
public void decrementCount() {
Preconditions.checkState(ongoingRecordCount > 0);
ongoingRecordCount--;
}
public void incrementCount() {
ongoingRecordCount++;
}
public Watermark getWatermark() {
return watermark;
}
/**
* Try to finish this epoch.
*
* @return whether this epoch has been finished.
*/
boolean tryFinish() {
if (this.status == EpochStatus.FINISHED) {
return true;
}
while (!outputQueue.isEmpty()) {
assert output != null;
output.accept(outputQueue.poll());
}
if (ongoingRecordCount == 0 && this.status == EpochStatus.CLOSED) {
this.status = EpochStatus.FINISHED;
if (advanceWatermark != null) {
advanceWatermark.run();
}
return true;
}
return false;
}
/** Close this epoch. */
public void close(Runnable advanceWatermark) {
this.advanceWatermark = advanceWatermark;
this.status = EpochStatus.CLOSED;
}
public void free() {
this.outputQueue.clear();
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
Epoch<?> other = (Epoch<?>) obj;
return ongoingRecordCount == other.ongoingRecordCount
&& Objects.equals(watermark, other.watermark)
&& status == other.status;
}
@Override
public int hashCode() {
return Objects.hash(watermark.hashCode(), ongoingRecordCount, status);
}
@Override
public String toString() {
return String.format(
"Epoch{watermark=%s, ongoingRecord=%d}", watermark, ongoingRecordCount);
}
@VisibleForTesting
public int getOngoingRecordCount() {
return ongoingRecordCount;
}
/** The status of an epoch. */
| Epoch |
java | google__dagger | javatests/dagger/internal/codegen/LazyClassKeyMapBindingComponentProcessorTest.java | {
"start": 8875,
"end": 9376
} | interface ____ {",
" Map<Class<?>, Provider<Integer>> classKey();",
"}");
CompilerTests.daggerCompiler(fooBar, fooBar2, mapKeyBindingsModule, componentFile)
.withProcessingOptions(compilerMode.processorOptions())
.compile(subject -> subject.hasErrorCount(0));
}
@Test
public void scopedLazyClassKeyProvider_compilesSuccessfully() throws Exception {
Source fooBar =
CompilerTests.javaSource("test.Foo_Bar", "package test;", "", " | TestComponent |
java | resilience4j__resilience4j | resilience4j-ratelimiter/src/main/java/io/github/resilience4j/ratelimiter/event/RateLimiterOnFailureEvent.java | {
"start": 699,
"end": 1103
} | class ____ extends AbstractRateLimiterEvent {
public RateLimiterOnFailureEvent(String rateLimiterName) {
super(rateLimiterName, 1);
}
public RateLimiterOnFailureEvent(String rateLimiterName, int numberOfPermits) {
super(rateLimiterName, numberOfPermits);
}
@Override
public Type getEventType() {
return Type.FAILED_ACQUIRE;
}
}
| RateLimiterOnFailureEvent |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/LangChain4jToolsEndpointBuilderFactory.java | {
"start": 18596,
"end": 19994
} | interface ____
extends
AdvancedLangChain4jToolsEndpointConsumerBuilder,
AdvancedLangChain4jToolsEndpointProducerBuilder {
default LangChain4jToolsEndpointBuilder basic() {
return (LangChain4jToolsEndpointBuilder) this;
}
/**
* Chat Model of type dev.langchain4j.model.chat.ChatModel.
*
* The option is a: <code>dev.langchain4j.model.chat.ChatModel</code>
* type.
*
* Group: advanced
*
* @param chatModel the value to set
* @return the dsl builder
*/
default AdvancedLangChain4jToolsEndpointBuilder chatModel(dev.langchain4j.model.chat.ChatModel chatModel) {
doSetProperty("chatModel", chatModel);
return this;
}
/**
* Chat Model of type dev.langchain4j.model.chat.ChatModel.
*
* The option will be converted to a
* <code>dev.langchain4j.model.chat.ChatModel</code> type.
*
* Group: advanced
*
* @param chatModel the value to set
* @return the dsl builder
*/
default AdvancedLangChain4jToolsEndpointBuilder chatModel(String chatModel) {
doSetProperty("chatModel", chatModel);
return this;
}
}
public | AdvancedLangChain4jToolsEndpointBuilder |
java | netty__netty | transport/src/main/java/io/netty/channel/ServerChannelRecvByteBufAllocator.java | {
"start": 791,
"end": 1162
} | class ____ extends DefaultMaxMessagesRecvByteBufAllocator {
public ServerChannelRecvByteBufAllocator() {
super(1, true);
}
@Override
public Handle newHandle() {
return new MaxMessageHandle() {
@Override
public int guess() {
return 128;
}
};
}
}
| ServerChannelRecvByteBufAllocator |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/misc/CustomPartitioningITCase.java | {
"start": 1350,
"end": 1913
} | class ____ extends JavaProgramTestBaseJUnit4 {
@Override
protected void testProgram() throws Exception {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
Assert.assertTrue(env.getParallelism() > 1);
env.fromSequence(1, 1000)
.partitionCustom(new AllZeroPartitioner(), new IdKeySelector<Long>())
.map(new FailExceptInPartitionZeroMapper())
.sinkTo(new DiscardingSink<>());
env.execute();
}
private static | CustomPartitioningITCase |
java | elastic__elasticsearch | x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/manual/ManualTriggerEngine.java | {
"start": 860,
"end": 2789
} | class ____ implements TriggerEngine<ManualTrigger, ManualTriggerEvent> {
static final String TYPE = "manual";
@Override
public String type() {
return TYPE;
}
/**
* It's the responsibility of the trigger engine implementation to select the appropriate jobs
* from the given list of jobs
*/
@Override
public void start(Collection<Watch> jobs) {}
@Override
public void stop() {}
@Override
public void register(Consumer<Iterable<TriggerEvent>> consumer) {}
@Override
public void add(Watch job) {}
@Override
public void pauseExecution() {}
@Override
public boolean remove(String jobId) {
return false;
}
@Override
public ManualTriggerEvent simulateEvent(String jobId, @Nullable Map<String, Object> data, TriggerService service) {
if (data == null) {
throw illegalArgument("could not simulate manual trigger event. missing required simulated trigger type");
}
if (data.size() == 1) {
String type = data.keySet().iterator().next();
return new ManualTriggerEvent(jobId, service.simulateEvent(type, jobId, data));
}
Object type = data.get("type");
if (type instanceof String s) {
return new ManualTriggerEvent(jobId, service.simulateEvent(s, jobId, data));
}
throw illegalArgument("could not simulate manual trigger event. could not resolve simulated trigger type");
}
@Override
public ManualTrigger parseTrigger(String context, XContentParser parser) throws IOException {
return ManualTrigger.parse(parser);
}
@Override
public ManualTriggerEvent parseTriggerEvent(TriggerService service, String watchId, String context, XContentParser parser)
throws IOException {
return ManualTriggerEvent.parse(service, watchId, context, parser);
}
}
| ManualTriggerEngine |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/javadoc/EmptyBlockTagTest.java | {
"start": 1476,
"end": 1695
} | interface ____ {
/**
* @param p
*/
void foo(int p);
}
""")
.addOutputLines(
"Test.java",
"""
| Test |
java | apache__dubbo | dubbo-cluster/src/main/java/org/apache/dubbo/rpc/cluster/router/affinity/config/AffinityServiceStateRouterFactory.java | {
"start": 1171,
"end": 1468
} | class ____ extends CacheableStateRouterFactory {
public static final String NAME = "affinity_service";
@Override
protected <T> StateRouter<T> createRouter(Class<T> interfaceClass, URL url) {
return new AffinityServiceStateRouter<T>(url);
}
}
| AffinityServiceStateRouterFactory |
java | apache__camel | components/camel-http/src/test/java/org/apache/camel/component/http/ManagedHttpProducerPoolStatsTest.java | {
"start": 1445,
"end": 4377
} | class ____ extends BaseHttpTest {
private HttpServer localServer;
@Override
protected boolean useJmx() {
return true;
}
@Override
public void setupResources() throws Exception {
localServer = ServerBootstrap.bootstrap()
.setCanonicalHostName("localhost").setHttpProcessor(getBasicHttpProcessor())
.setConnectionReuseStrategy(getConnectionReuseStrategy()).setResponseFactory(getHttpResponseFactory())
.setSslContext(getSSLContext())
.register("/myapp", (request, response, context) -> {
response.setEntity(new StringEntity("OK", StandardCharsets.US_ASCII));
response.setCode(HttpStatus.SC_OK);
}).create();
localServer.start();
}
@Override
public void cleanupResources() {
if (localServer != null) {
localServer.stop();
}
}
@Test
public void testPoolStats() throws Exception {
// turn on registering jmx always so the producer is also registered
context.getManagementStrategy().getManagementAgent().setRegisterAlways(true);
String uri = "http://localhost:" + localServer.getLocalPort() + "/myapp";
Exchange out = template.request(uri, exchange -> exchange.getIn().setBody("Hello World"));
assertNotNull(out);
assertEquals("OK", out.getMessage().getBody(String.class));
// look up stats
HttpEndpoint http = context.getEndpoint(uri, HttpEndpoint.class);
assertNotNull(http);
int max = http.getClientConnectionsPoolStatsMax();
int avail = http.getClientConnectionsPoolStatsAvailable();
int leased = http.getClientConnectionsPoolStatsLeased();
int pending = http.getClientConnectionsPoolStatsPending();
assertEquals(200, max);
assertEquals(1, avail);
assertEquals(0, leased);
assertEquals(0, pending);
// should be in JMX too
MBeanServer mbeanServer = getMBeanServer();
String id = context.getManagementName();
ObjectName on = ObjectName.getInstance("org.apache.camel:context=" + id + ",type=endpoints,name=\"" + uri + "\"");
assertTrue(mbeanServer.isRegistered(on));
max = (int) mbeanServer.getAttribute(on, "ClientConnectionsPoolStatsMax");
assertEquals(200, max);
avail = (int) mbeanServer.getAttribute(on, "ClientConnectionsPoolStatsAvailable");
assertEquals(1, avail);
leased = (int) mbeanServer.getAttribute(on, "ClientConnectionsPoolStatsLeased");
assertEquals(0, leased);
pending = (int) mbeanServer.getAttribute(on, "ClientConnectionsPoolStatsPending");
assertEquals(0, pending);
}
protected MBeanServer getMBeanServer() {
return context.getManagementStrategy().getManagementAgent().getMBeanServer();
}
}
| ManagedHttpProducerPoolStatsTest |
java | apache__camel | components/camel-aws/camel-aws-secrets-manager/src/test/java/org/apache/camel/component/aws/secretsmanager/integration/SecretsManagerRotateSecretProducerLocalstackIT.java | {
"start": 1940,
"end": 4031
} | class ____ extends AwsSecretsManagerBaseTest {
@EndpointInject("mock:result")
private MockEndpoint mock;
private String arn;
@Order(1)
@Test
public void createSecretTest() {
mock.expectedMessageCount(1);
final Exchange exchange = template.request("direct:createSecret", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setHeader(SecretsManagerConstants.SECRET_NAME, "TestSecret4");
exchange.getIn().setBody("Body");
}
});
CreateSecretResponse resultGet = (CreateSecretResponse) exchange.getMessage().getBody();
assertNotNull(resultGet);
arn = resultGet.arn();
resultGet.name();
}
@Disabled("This test probably needs a lambda code to run correctly")
@Order(2)
@Test
public void testRotateSecret() {
final Exchange exchange = template.request("direct:rotateSecret", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setHeader(SecretsManagerConstants.SECRET_ID, arn);
exchange.getIn().setHeader(SecretsManagerConstants.LAMBDA_ROTATION_FUNCTION_ARN, arn);
}
});
RotateSecretResponse resultRotate = (RotateSecretResponse) exchange.getMessage().getBody();
assertNotNull(resultRotate);
assertTrue(resultRotate.sdkHttpResponse().isSuccessful());
assertEquals("TestSecret4", resultRotate.name());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:createSecret")
.to("aws-secrets-manager://test?operation=createSecret");
from("direct:rotateSecret")
.to("aws-secrets-manager://test?operation=rotateSecret")
.to("mock:result");
}
};
}
}
| SecretsManagerRotateSecretProducerLocalstackIT |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.