language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
quarkusio__quarkus
|
integration-tests/opentelemetry-reactive/src/test/java/io/quarkus/it/opentelemetry/reactive/OpenTelemetryReactiveTest.java
|
{
"start": 1733,
"end": 18472
}
|
class ____ {
@BeforeEach
@AfterEach
void reset() {
await().atMost(Duration.ofSeconds(30L)).until(() -> {
// make sure spans are cleared
List<Map<String, Object>> spans = getSpans();
if (!spans.isEmpty()) {
given().get("/reset").then().statusCode(HTTP_OK);
}
return spans.isEmpty();
});
}
@Test
void get() {
given()
.when()
.queryParam("name", "Naruto")
.get("/reactive")
.then()
.statusCode(200)
.body(equalTo("Hello Naruto"));
await().atMost(5, SECONDS).until(() -> getSpans().size() == 2);
List<Map<String, Object>> spans = getSpans();
assertEquals(2, spans.size());
assertEquals(spans.get(0).get("traceId"), spans.get(1).get("traceId"));
}
@Test
void helloGetUniDelayTest() {
given()
.when()
.get("/reactive/hello-get-uni-delay")
.then()
.statusCode(200)
.body(equalTo("helloGetUniDelay"));
await().atMost(5, SECONDS).until(() -> getSpans().size() == 2);
Map<String, Object> parent = getSpanByKindAndParentId(getSpans(), SERVER, "0000000000000000");
assertEquals("GET /reactive/hello-get-uni-delay", parent.get("name"));
Map<String, Object> child = getSpanByKindAndParentId(getSpans(), INTERNAL, parent.get("spanId"));
assertEquals("helloGetUniDelay", child.get("name"));
assertEquals(child.get("traceId"), parent.get("traceId"));
}
@Test
void helloGetUniExecutorTest() {
given()
.when()
.get("/reactive/hello-get-uni-executor")
.then()
.statusCode(200)
.body(equalTo("helloGetUniExecutor"));
await().atMost(5, SECONDS).until(() -> getSpans().size() == 2);
Map<String, Object> parent = getSpanByKindAndParentId(getSpans(), SERVER, "0000000000000000");
assertEquals("GET /reactive/hello-get-uni-executor", parent.get("name"));
Map<String, Object> child = getSpanByKindAndParentId(getSpans(), INTERNAL, parent.get("spanId"));
assertEquals("helloGetUniExecutor", child.get("name"));
assertEquals(child.get("traceId"), parent.get("traceId"));
}
@Test
void blockingException() {
given()
.when()
.get("/reactive/blockingException")
.then()
.statusCode(500);
assertExceptionRecorded();
}
@Test
void reactiveException() {
given()
.when()
.get("/reactive/reactiveException")
.then()
.statusCode(500);
assertExceptionRecorded();
}
private static void assertExceptionRecorded() {
await().atMost(5, SECONDS).until(() -> getExceptionEventData().size() == 1);
assertThat(getExceptionEventData()).singleElement().satisfies(s -> {
assertThat(s).contains("dummy");
});
}
@Test
void post() {
given()
.when()
.body("Naruto")
.post("/reactive")
.then()
.statusCode(200)
.body(equalTo("Hello Naruto"));
await().atMost(5, SECONDS).until(() -> getSpans().size() == 2);
List<Map<String, Object>> spans = getSpans();
assertEquals(2, spans.size());
assertEquals(spans.get(0).get("traceId"), spans.get(1).get("traceId"));
}
@Test
void multipleUsingChain() {
given()
.when()
.get("/reactive/multiple-chain")
.then()
.statusCode(200)
.body(equalTo("Hello Naruto and Hello Goku"));
await().atMost(5, SECONDS).until(() -> getSpans().size() == 7);
List<Map<String, Object>> spans = getSpans();
assertEquals(7, spans.size());
assertEquals(1, spans.stream().map(map -> map.get("traceId")).collect(toSet()).size());
// First span is the call getting into the server. It does not have a parent span.
Map<String, Object> parent = getSpanByKindAndParentId(spans, SERVER, "0000000000000000");
// We should get 2 client spans originated by the server
List<Map<String, Object>> clientSpans = getSpansByKindAndParentId(spans, CLIENT, parent.get("spanId"));
assertEquals(2, clientSpans.size());
// Each client calls the server and programmatically create a span, so each have a server and an internal span
// Naruto Span
Optional<Map<String, Object>> narutoSpan = clientSpans.stream()
.filter(map -> ((String) ((Map<?, ?>) map.get("attributes")).get(URL_FULL.getKey())).contains("Naruto"))
.findFirst();
assertTrue(narutoSpan.isPresent());
Map<String, Object> naruto = narutoSpan.get();
Map<String, Object> narutoServer = getSpanByKindAndParentId(spans, SERVER, naruto.get("spanId"));
assertEquals("/reactive", ((Map<?, ?>) narutoServer.get("attributes")).get(URL_PATH.getKey()));
assertEquals("name=Naruto", ((Map<?, ?>) narutoServer.get("attributes")).get(URL_QUERY.getKey()));
Map<String, Object> narutoInternal = getSpanByKindAndParentId(spans, INTERNAL, narutoServer.get("spanId"));
assertEquals("helloGet", narutoInternal.get("name"));
// Goku Span
Optional<Map<String, Object>> gokuSpan = clientSpans.stream()
.filter(map -> ((String) ((Map<?, ?>) map.get("attributes")).get(URL_FULL.getKey())).contains("Goku"))
.findFirst();
assertTrue(gokuSpan.isPresent());
Map<String, Object> goku = gokuSpan.get();
Map<String, Object> gokuServer = getSpanByKindAndParentId(spans, SERVER, goku.get("spanId"));
assertEquals("/reactive", ((Map<?, ?>) gokuServer.get("attributes")).get(URL_PATH.getKey()));
assertEquals("name=Goku", ((Map<?, ?>) gokuServer.get("attributes")).get(URL_QUERY.getKey()));
Map<String, Object> gokuInternal = getSpanByKindAndParentId(spans, INTERNAL, gokuServer.get("spanId"));
assertEquals("helloGet", gokuInternal.get("name"));
}
@Test
void multipleUsingCombine() {
given()
.when()
.get("/reactive/multiple-combine")
.then()
.statusCode(200)
.body(equalTo("Hello Naruto and Hello Goku"));
await().atMost(5, SECONDS).until(() -> getSpans().size() == 7);
List<Map<String, Object>> spans = getSpans();
assertEquals(7, spans.size());
assertEquals(1, spans.stream().map(map -> map.get("traceId")).collect(toSet()).size());
// First span is the call getting into the server. It does not have a parent span.
Map<String, Object> parent = getSpanByKindAndParentId(spans, SERVER, "0000000000000000");
// We should get 2 client spans originated by the server
List<Map<String, Object>> clientSpans = getSpansByKindAndParentId(spans, CLIENT, parent.get("spanId"));
assertEquals(2, clientSpans.size());
// Each client calls the server and programmatically create a span, so each have a server and an internal span
// Naruto Span
Optional<Map<String, Object>> narutoSpan = clientSpans.stream()
.filter(map -> ((String) ((Map<?, ?>) map.get("attributes")).get(URL_FULL.getKey())).contains("Naruto"))
.findFirst();
assertTrue(narutoSpan.isPresent());
Map<String, Object> naruto = narutoSpan.get();
Map<String, Object> narutoServer = getSpanByKindAndParentId(spans, SERVER, naruto.get("spanId"));
assertEquals("/reactive", ((Map<?, ?>) narutoServer.get("attributes")).get(URL_PATH.getKey()));
assertEquals("name=Naruto", ((Map<?, ?>) narutoServer.get("attributes")).get(URL_QUERY.getKey()));
Map<String, Object> narutoInternal = getSpanByKindAndParentId(spans, INTERNAL, narutoServer.get("spanId"));
assertEquals("helloGet", narutoInternal.get("name"));
// Goku Span
Optional<Map<String, Object>> gokuSpan = clientSpans.stream()
.filter(map -> ((String) ((Map<?, ?>) map.get("attributes")).get(URL_FULL.getKey())).contains("Goku"))
.findFirst();
assertTrue(gokuSpan.isPresent());
Map<String, Object> goku = gokuSpan.get();
Map<String, Object> gokuServer = getSpanByKindAndParentId(spans, SERVER, goku.get("spanId"));
assertEquals("/reactive", ((Map<?, ?>) gokuServer.get("attributes")).get(URL_PATH.getKey()));
assertEquals("name=Goku", ((Map<?, ?>) gokuServer.get("attributes")).get(URL_QUERY.getKey()));
Map<String, Object> gokuInternal = getSpanByKindAndParentId(spans, INTERNAL, gokuServer.get("spanId"));
assertEquals("helloGet", gokuInternal.get("name"));
}
@Test
void multipleUsingCombineDifferentPaths() {
given()
.when()
.get("/reactive/multiple-combine-different-paths")
.then()
.statusCode(200)
.body(equalTo("helloGetUniDelay and Hello Naruto and Hello Goku and helloGetUniExecutor"));
await().atMost(5, SECONDS).until(() -> getSpans().size() == 13);
List<Map<String, Object>> spans = getSpans();
assertEquals(13, spans.size());
assertEquals(1, spans.stream().map(map -> map.get("traceId")).collect(toSet()).size());
// First span is the call getting into the server. It does not have a parent span.
Map<String, Object> parent = getSpanByKindAndParentId(spans, SERVER, "0000000000000000");
// We should get 2 client spans originated by the server
List<Map<String, Object>> clientSpans = getSpansByKindAndParentId(spans, CLIENT, parent.get("spanId"));
assertEquals(4, clientSpans.size());
// Each client calls the server and programmatically create a span, so each have a server and an internal span
// helloGetUniDelay Span
Optional<Map<String, Object>> helloGetUniDelaySpan = clientSpans.stream()
.filter(map -> ((String) ((Map<?, ?>) map.get("attributes")).get(URL_FULL.getKey()))
.contains("/hello-get-uni-delay"))
.findFirst();
assertTrue(helloGetUniDelaySpan.isPresent());
Map<String, Object> helloGetUniDelay = helloGetUniDelaySpan.get();
assertEquals("GET /reactive/hello-get-uni-delay", helloGetUniDelay.get("name"));
Map<String, Object> helloGetUniDelayServer = getSpanByKindAndParentId(spans, SERVER, helloGetUniDelay.get("spanId"));
assertEquals("/reactive/hello-get-uni-delay",
((Map<?, ?>) helloGetUniDelayServer.get("attributes")).get(URL_PATH.getKey()));
Map<String, Object> helloGetUniDelayInternal = getSpanByKindAndParentId(spans, INTERNAL,
helloGetUniDelayServer.get("spanId"));
assertEquals("helloGetUniDelay", helloGetUniDelayInternal.get("name"));
// Naruto Span
Optional<Map<String, Object>> narutoSpan = clientSpans.stream()
.filter(map -> ((String) ((Map<?, ?>) map.get("attributes")).get(URL_FULL.getKey())).contains("Naruto"))
.findFirst();
assertTrue(narutoSpan.isPresent());
Map<String, Object> naruto = narutoSpan.get();
assertEquals("GET /reactive", naruto.get("name"));
Map<String, Object> narutoServer = getSpanByKindAndParentId(spans, SERVER, naruto.get("spanId"));
assertEquals("/reactive", ((Map<?, ?>) narutoServer.get("attributes")).get(URL_PATH.getKey()));
assertEquals("name=Naruto", ((Map<?, ?>) narutoServer.get("attributes")).get(URL_QUERY.getKey()));
Map<String, Object> narutoInternal = getSpanByKindAndParentId(spans, INTERNAL, narutoServer.get("spanId"));
assertEquals("helloGet", narutoInternal.get("name"));
// Goku Span
Optional<Map<String, Object>> gokuSpan = clientSpans.stream()
.filter(map -> ((String) ((Map<?, ?>) map.get("attributes")).get(URL_FULL.getKey())).contains("Goku"))
.findFirst();
assertTrue(gokuSpan.isPresent());
Map<String, Object> goku = gokuSpan.get();
assertEquals("GET /reactive", goku.get("name"));
Map<String, Object> gokuServer = getSpanByKindAndParentId(spans, SERVER, goku.get("spanId"));
assertEquals("/reactive", ((Map<?, ?>) gokuServer.get("attributes")).get(URL_PATH.getKey()));
assertEquals("name=Goku", ((Map<?, ?>) gokuServer.get("attributes")).get(URL_QUERY.getKey()));
Map<String, Object> gokuInternal = getSpanByKindAndParentId(spans, INTERNAL, gokuServer.get("spanId"));
assertEquals("helloGet", gokuInternal.get("name"));
// helloGetUniDelay Span
Optional<Map<String, Object>> helloGetUniExecutorSpan = clientSpans.stream()
.filter(map -> ((String) ((Map<?, ?>) map.get("attributes")).get(URL_FULL.getKey()))
.contains("/hello-get-uni-executor"))
.findFirst();
assertTrue(helloGetUniExecutorSpan.isPresent());
Map<String, Object> helloGetUniExecutor = helloGetUniExecutorSpan.get();
assertEquals("GET /reactive/hello-get-uni-executor", helloGetUniExecutor.get("name"));
Map<String, Object> helloGetUniExecutorServer = getSpanByKindAndParentId(spans, SERVER,
helloGetUniExecutor.get("spanId"));
assertEquals("/reactive/hello-get-uni-executor",
((Map<?, ?>) helloGetUniExecutorServer.get("attributes")).get(URL_PATH.getKey()));
Map<String, Object> helloGetUniExecutorInternal = getSpanByKindAndParentId(spans, INTERNAL,
helloGetUniExecutorServer.get("spanId"));
assertEquals("helloGetUniExecutor", helloGetUniExecutorInternal.get("name"));
}
@Test
public void securedInvalidCredential() {
given().auth().preemptive().basic("scott", "reader2").when().get("/foo/secured/item/something")
.then()
.statusCode(401);
await().atMost(5, SECONDS).until(() -> getSpans().size() == 1);
assertThat(getSpans()).singleElement().satisfies(m -> {
assertThat(m).extractingByKey("name").isEqualTo("GET /{dummy}/secured/item/{value}");
assertEvent(m, SecurityEventUtil.AUTHN_FAILURE_EVENT_NAME);
});
}
@Test
public void securedProperCredentials() {
given().auth().preemptive().basic("scott", "reader").when().get("/foo/secured/item/something")
.then()
.statusCode(200);
await().atMost(5, SECONDS).until(() -> getSpans().size() == 1);
assertThat(getSpans()).singleElement().satisfies(m -> {
assertThat(m).extractingByKey("name").isEqualTo("GET /{dummy}/secured/item/{value}");
assertEvent(m, SecurityEventUtil.AUTHN_SUCCESS_EVENT_NAME, SecurityEventUtil.AUTHZ_SUCCESS_EVENT_NAME);
});
}
private static void assertEvent(Map<String, Object> spanData, String... expectedEventNames) {
String spanName = (String) spanData.get("name");
var events = (List) spanData.get("events");
Assertions.assertEquals(expectedEventNames.length, events.size());
for (String expectedEventName : expectedEventNames) {
boolean foundEvent = events.stream().anyMatch(m -> expectedEventName.equals(((Map) m).get("name")));
assertTrue(foundEvent, "Span '%s' did not contain event '%s'".formatted(spanName, expectedEventName));
assertEventAttributes(spanName, expectedEventName);
}
}
private static void assertEventAttributes(String spanName, String eventName) {
var attrs = getSpanEventAttrs(spanName, eventName);
switch (eventName) {
case SecurityEventUtil.AUTHN_FAILURE_EVENT_NAME:
assertEquals(AuthenticationFailedException.class.getName(), attrs.get(SecurityEventUtil.FAILURE_NAME));
break;
case SecurityEventUtil.AUTHN_SUCCESS_EVENT_NAME:
case SecurityEventUtil.AUTHZ_SUCCESS_EVENT_NAME:
assertEquals("scott", attrs.get(SecurityEventUtil.SECURITY_IDENTITY_PRINCIPAL));
assertEquals(Boolean.FALSE, attrs.get(SecurityEventUtil.SECURITY_IDENTITY_IS_ANONYMOUS));
break;
default:
Assertions.fail("Unknown event name " + eventName);
}
}
}
|
OpenTelemetryReactiveTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/embeddings/MistralEmbeddingsModel.java
|
{
"start": 1345,
"end": 3427
}
|
class ____ extends MistralModel {
public MistralEmbeddingsModel(
String inferenceEntityId,
TaskType taskType,
String service,
Map<String, Object> serviceSettings,
ChunkingSettings chunkingSettings,
@Nullable Map<String, Object> secrets,
ConfigurationParseContext context
) {
this(
inferenceEntityId,
taskType,
service,
MistralEmbeddingsServiceSettings.fromMap(serviceSettings, context),
chunkingSettings,
DefaultSecretSettings.fromMap(secrets)
);
}
public MistralEmbeddingsModel(MistralEmbeddingsModel model, MistralEmbeddingsServiceSettings serviceSettings) {
super(model, serviceSettings);
setPropertiesFromServiceSettings(serviceSettings);
}
private void setPropertiesFromServiceSettings(MistralEmbeddingsServiceSettings serviceSettings) {
this.rateLimitSettings = serviceSettings.rateLimitSettings();
setEndpointUrl();
}
private void setEndpointUrl() {
try {
this.uri = new URI(API_EMBEDDINGS_PATH);
} catch (URISyntaxException e) {
throw new RuntimeException(e);
}
}
public MistralEmbeddingsModel(
String inferenceEntityId,
TaskType taskType,
String service,
MistralEmbeddingsServiceSettings serviceSettings,
ChunkingSettings chunkingSettings,
DefaultSecretSettings secrets
) {
super(
new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, EmptyTaskSettings.INSTANCE, chunkingSettings),
new ModelSecrets(secrets)
);
setPropertiesFromServiceSettings(serviceSettings);
}
@Override
public MistralEmbeddingsServiceSettings getServiceSettings() {
return (MistralEmbeddingsServiceSettings) super.getServiceSettings();
}
@Override
public ExecutableAction accept(MistralActionVisitor creator) {
return creator.create(this);
}
}
|
MistralEmbeddingsModel
|
java
|
elastic__elasticsearch
|
server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java
|
{
"start": 1755,
"end": 10284
}
|
class ____ extends ESIntegTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Arrays.asList(MockFSIndexStore.TestPlugin.class);
}
public void testRandomDirectoryIOExceptions() throws IOException, InterruptedException, ExecutionException {
String mapping = Strings.toString(
XContentFactory.jsonBuilder()
.startObject()
.startObject("properties")
.startObject("test")
.field("type", "keyword")
.endObject()
.endObject()
.endObject()
);
final double exceptionRate;
final double exceptionOnOpenRate;
if (frequently()) {
if (randomBoolean()) {
if (randomBoolean()) {
exceptionOnOpenRate = 1.0 / between(5, 100);
exceptionRate = 0.0d;
} else {
exceptionRate = 1.0 / between(5, 100);
exceptionOnOpenRate = 0.0d;
}
} else {
exceptionOnOpenRate = 1.0 / between(5, 100);
exceptionRate = 1.0 / between(5, 100);
}
} else {
// rarely no exception
exceptionRate = 0d;
exceptionOnOpenRate = 0d;
}
final boolean createIndexWithoutErrors = randomBoolean();
int numInitialDocs = 0;
if (createIndexWithoutErrors) {
Settings.Builder settings = Settings.builder().put("index.number_of_replicas", numberOfReplicas());
logger.info("creating index: [test] using settings: [{}]", settings.build());
indicesAdmin().prepareCreate("test").setSettings(settings).setMapping(mapping).get();
numInitialDocs = between(10, 100);
ensureGreen();
for (int i = 0; i < numInitialDocs; i++) {
prepareIndex("test").setId("init" + i).setSource("test", "init").get();
}
indicesAdmin().prepareRefresh("test").execute().get();
indicesAdmin().prepareFlush("test").execute().get();
indicesAdmin().prepareClose("test").execute().get();
indicesAdmin().prepareOpen("test").execute().get();
} else {
Settings.Builder settings = Settings.builder()
.put("index.number_of_replicas", randomIntBetween(0, 1))
.put(MockFSIndexStore.INDEX_CHECK_INDEX_ON_CLOSE_SETTING.getKey(), false)
.put(MockFSDirectoryFactory.RANDOM_IO_EXCEPTION_RATE_SETTING.getKey(), exceptionRate)
// we cannot expect that the index will be valid
.put(MockFSDirectoryFactory.RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING.getKey(), exceptionOnOpenRate);
logger.info("creating index: [test] using settings: [{}]", settings.build());
indicesAdmin().prepareCreate("test").setSettings(settings).setMapping(mapping).get();
}
ClusterHealthResponse clusterHealthResponse = clusterAdmin()
// it's OK to timeout here
.health(
new ClusterHealthRequest(TEST_REQUEST_TIMEOUT, new String[] {}).waitForYellowStatus()
.masterNodeTimeout(TimeValue.timeValueSeconds(5))
.timeout(TimeValue.timeValueSeconds(5))
)
.get();
final int numDocs;
final boolean expectAllShardsFailed;
if (clusterHealthResponse.isTimedOut()) {
/* some seeds just won't let you create the index at all and we enter a ping-pong mode
* trying one node after another etc. that is ok but we need to make sure we don't wait
* forever when indexing documents so we set numDocs = 1 and expecte all shards to fail
* when we search below.*/
logger.info("ClusterHealth timed out - only index one doc and expect searches to fail");
numDocs = 1;
expectAllShardsFailed = true;
} else {
numDocs = between(10, 100);
expectAllShardsFailed = false;
}
int numCreated = 0;
boolean[] added = new boolean[numDocs];
for (int i = 0; i < numDocs; i++) {
added[i] = false;
try {
DocWriteResponse indexResponse = prepareIndex("test").setId(Integer.toString(i))
.setTimeout(TimeValue.timeValueSeconds(1))
.setSource("test", English.intToEnglish(i))
.get();
if (indexResponse.getResult() == DocWriteResponse.Result.CREATED) {
numCreated++;
added[i] = true;
}
} catch (ElasticsearchException ex) {}
}
ESIntegTestCase.NumShards numShards = getNumShards("test");
logger.info("Start Refresh");
// don't assert on failures here
final BroadcastResponse refreshResponse = indicesAdmin().prepareRefresh("test").execute().get();
final boolean refreshFailed = refreshResponse.getShardFailures().length != 0 || refreshResponse.getFailedShards() != 0;
logger.info(
"Refresh failed [{}] numShardsFailed: [{}], shardFailuresLength: [{}], successfulShards: [{}], totalShards: [{}] ",
refreshFailed,
refreshResponse.getFailedShards(),
refreshResponse.getShardFailures().length,
refreshResponse.getSuccessfulShards(),
refreshResponse.getTotalShards()
);
final int numSearches = scaledRandomIntBetween(10, 20);
final int finalNumCreated = numCreated;
final int finalNumInitialDocs = numInitialDocs;
// we don't check anything here really just making sure we don't leave any open files or a broken index behind.
for (int i = 0; i < numSearches; i++) {
try {
int docToQuery = between(0, numDocs - 1);
int expectedResults = added[docToQuery] ? 1 : 0;
logger.info("Searching for [test:{}]", English.intToEnglish(docToQuery));
assertResponse(
prepareSearch().setQuery(QueryBuilders.matchQuery("test", English.intToEnglish(docToQuery))).setSize(expectedResults),
response -> {
logger.info("Successful shards: [{}] numShards: [{}]", response.getSuccessfulShards(), numShards.numPrimaries);
if (response.getSuccessfulShards() == numShards.numPrimaries && refreshFailed == false) {
assertResultsAndLogOnFailure(expectedResults, response);
}
}
);
// check match all
assertResponse(
prepareSearch().setQuery(QueryBuilders.matchAllQuery())
.setSize(numCreated + numInitialDocs)
.addSort("_uid", SortOrder.ASC),
response -> {
logger.info(
"Match all Successful shards: [{}] numShards: [{}]",
response.getSuccessfulShards(),
numShards.numPrimaries
);
if (response.getSuccessfulShards() == numShards.numPrimaries && refreshFailed == false) {
assertResultsAndLogOnFailure(finalNumCreated + finalNumInitialDocs, response);
}
}
);
} catch (SearchPhaseExecutionException ex) {
logger.info("SearchPhaseException: [{}]", ex.getMessage());
// if a scheduled refresh or flush fails all shards we see all shards failed here
if ((expectAllShardsFailed
|| refreshResponse.getSuccessfulShards() == 0
|| ex.getMessage().contains("all shards failed")) == false) {
throw ex;
}
}
}
if (createIndexWithoutErrors) {
// check the index still contains the records that we indexed without errors
indicesAdmin().prepareClose("test").execute().get();
indicesAdmin().prepareOpen("test").execute().get();
ensureGreen();
assertHitCountAndNoFailures(prepareSearch().setQuery(QueryBuilders.matchQuery("test", "init")), numInitialDocs);
}
}
}
|
SearchWithRandomIOExceptionsIT
|
java
|
micronaut-projects__micronaut-core
|
json-core/src/main/java/io/micronaut/json/bind/JsonBeanPropertyBinderExceptionHandler.java
|
{
"start": 888,
"end": 1050
}
|
interface ____ converts json binding exceptions to more specific {@link ConversionErrorException}s.
*
* @author Jonas Konrad
* @since 3.1
*/
@Internal
public
|
that
|
java
|
alibaba__fastjson
|
src/test/java/com/derbysoft/spitfire/fastjson/dto/HotelRefDTO.java
|
{
"start": 53,
"end": 1142
}
|
class ____ extends AbstractDTO{
private String code;
private String name;
private String chainCode;
private String brandCode;
private TPAExtensionsDTO tpaExtensions;
public HotelRefDTO() {
}
public HotelRefDTO(String code) {
this.code = code;
}
public String getCode() {
return code;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getChainCode() {
return chainCode;
}
public void setChainCode(String chainCode) {
this.chainCode = chainCode;
}
public String getBrandCode() {
return brandCode;
}
public void setBrandCode(String brandCode) {
this.brandCode = brandCode;
}
public void setCode(String code) {
this.code = code;
}
public TPAExtensionsDTO getTpaExtensions() {
return tpaExtensions;
}
public void setTpaExtensions(TPAExtensionsDTO tpaExtensions) {
this.tpaExtensions = tpaExtensions;
}
}
|
HotelRefDTO
|
java
|
spring-projects__spring-framework
|
spring-messaging/src/main/java/org/springframework/messaging/simp/user/UserDestinationMessageHandler.java
|
{
"start": 2357,
"end": 8146
}
|
class ____ implements MessageHandler, SmartLifecycle {
private static final Log logger = SimpLogging.forLogName(UserDestinationMessageHandler.class);
private final SubscribableChannel clientInboundChannel;
private final SubscribableChannel brokerChannel;
private final UserDestinationResolver destinationResolver;
private final SendHelper sendHelper;
private @Nullable BroadcastHandler broadcastHandler;
private @Nullable MessageHeaderInitializer headerInitializer;
private volatile boolean running;
private @Nullable Integer phase;
private final Object lifecycleMonitor = new Object();
/**
* Create an instance with the given client and broker channels to subscribe to,
* and then send resolved messages to the broker channel.
* @param clientInboundChannel messages received from clients.
* @param brokerChannel messages sent to the broker.
* @param destinationResolver the resolver for "user" destinations.
*/
public UserDestinationMessageHandler(
SubscribableChannel clientInboundChannel, SubscribableChannel brokerChannel,
UserDestinationResolver destinationResolver) {
Assert.notNull(clientInboundChannel, "'clientInChannel' must not be null");
Assert.notNull(brokerChannel, "'brokerChannel' must not be null");
Assert.notNull(destinationResolver, "resolver must not be null");
this.clientInboundChannel = clientInboundChannel;
this.brokerChannel = brokerChannel;
this.sendHelper = new SendHelper(clientInboundChannel, brokerChannel);
this.destinationResolver = destinationResolver;
}
/**
* Return the configured {@link UserDestinationResolver}.
*/
public UserDestinationResolver getUserDestinationResolver() {
return this.destinationResolver;
}
/**
* Set a destination to broadcast messages to that remain unresolved because
* the user is not connected. In a multi-application server scenario this
* gives other application servers a chance to try.
* <p>By default this is not set.
* @param destination the target destination.
*/
public void setBroadcastDestination(@Nullable String destination) {
this.broadcastHandler = (StringUtils.hasText(destination) ?
new BroadcastHandler(this.sendHelper.getMessagingTemplate(), destination) : null);
}
/**
* Return the configured destination for unresolved messages.
*/
public @Nullable String getBroadcastDestination() {
return (this.broadcastHandler != null ? this.broadcastHandler.getBroadcastDestination() : null);
}
/**
* Return the messaging template used to send resolved messages to the
* broker channel.
*/
public MessageSendingOperations<String> getBrokerMessagingTemplate() {
return this.sendHelper.getMessagingTemplate();
}
/**
* Configure a custom {@link MessageHeaderInitializer} to initialize the
* headers of resolved target messages.
* <p>By default this is not set.
*/
public void setHeaderInitializer(@Nullable MessageHeaderInitializer headerInitializer) {
this.headerInitializer = headerInitializer;
}
/**
* Return the configured header initializer.
*/
public @Nullable MessageHeaderInitializer getHeaderInitializer() {
return this.headerInitializer;
}
/**
* Set the phase that this handler should run in.
* <p>By default, this is {@link SmartLifecycle#DEFAULT_PHASE}, but with
* {@code @EnableWebSocketMessageBroker} configuration it is set to 0.
* @since 6.1.4
*/
public void setPhase(int phase) {
this.phase = phase;
}
@Override
public int getPhase() {
return (this.phase != null ? this.phase : SmartLifecycle.super.getPhase());
}
@Override
public final void start() {
synchronized (this.lifecycleMonitor) {
this.clientInboundChannel.subscribe(this);
this.brokerChannel.subscribe(this);
this.running = true;
}
}
@Override
public final void stop() {
synchronized (this.lifecycleMonitor) {
this.running = false;
this.clientInboundChannel.unsubscribe(this);
this.brokerChannel.unsubscribe(this);
}
}
@Override
public final void stop(Runnable callback) {
synchronized (this.lifecycleMonitor) {
stop();
callback.run();
}
}
@Override
public final boolean isRunning() {
return this.running;
}
@Override
public void handleMessage(Message<?> sourceMessage) throws MessagingException {
Message<?> message = sourceMessage;
if (this.broadcastHandler != null) {
message = this.broadcastHandler.preHandle(sourceMessage);
if (message == null) {
return;
}
}
UserDestinationResult result = this.destinationResolver.resolveDestination(message);
if (result == null) {
this.sendHelper.checkDisconnect(message);
return;
}
if (result.getTargetDestinations().isEmpty()) {
if (logger.isTraceEnabled()) {
logger.trace("No active sessions for user destination: " + result.getSourceDestination());
}
if (this.broadcastHandler != null) {
this.broadcastHandler.handleUnresolved(message);
}
return;
}
SimpMessageHeaderAccessor accessor = SimpMessageHeaderAccessor.wrap(message);
initHeaders(accessor);
accessor.setNativeHeader(SimpMessageHeaderAccessor.ORIGINAL_DESTINATION, result.getSubscribeDestination());
accessor.setLeaveMutable(true);
message = MessageBuilder.createMessage(message.getPayload(), accessor.getMessageHeaders());
if (logger.isTraceEnabled()) {
logger.trace("Translated " + result.getSourceDestination() + " -> " + result.getTargetDestinations());
}
this.sendHelper.send(result, message);
}
private void initHeaders(SimpMessageHeaderAccessor headerAccessor) {
if (getHeaderInitializer() != null) {
getHeaderInitializer().initHeaders(headerAccessor);
}
}
@Override
public String toString() {
return "UserDestinationMessageHandler[" + this.destinationResolver + "]";
}
private static
|
UserDestinationMessageHandler
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/pagination/Entry.java
|
{
"start": 193,
"end": 1232
}
|
class ____ {
private int id;
private String name;
private Set<Tag> tags = new HashSet<Tag>();
public Entry() {
}
public Entry(String name) {
this.name = name;
}
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Set<Tag> getTags() {
return tags;
}
public void setTags(Set<Tag> tags) {
this.tags = tags;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((name == null) ? 0 : name.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (getClass() != obj.getClass()) return false;
Entry other = (Entry) obj;
if (name == null) {
if (other.name != null) return false;
}
else if (!name.equals(other.name)) return false;
return true;
}
@Override
public String toString() {
return getName();
}
}
|
Entry
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/QueuePlacementPolicy.java
|
{
"start": 2454,
"end": 2599
}
|
class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(QueuePlacementPolicy.class);
// Simple private
|
QueuePlacementPolicy
|
java
|
apache__flink
|
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/columnar/vector/heap/HeapBooleanVector.java
|
{
"start": 1102,
"end": 1976
}
|
class ____ extends AbstractHeapVector implements WritableBooleanVector {
private static final long serialVersionUID = 4131239076731313596L;
public boolean[] vector;
public HeapBooleanVector(int len) {
super(len);
vector = new boolean[len];
}
@Override
public HeapIntVector reserveDictionaryIds(int capacity) {
throw new RuntimeException("HeapBooleanVector has no dictionary.");
}
@Override
public HeapIntVector getDictionaryIds() {
throw new RuntimeException("HeapBooleanVector has no dictionary.");
}
@Override
public boolean getBoolean(int i) {
return vector[i];
}
@Override
public void setBoolean(int i, boolean value) {
vector[i] = value;
}
@Override
public void fill(boolean value) {
Arrays.fill(vector, value);
}
}
|
HeapBooleanVector
|
java
|
apache__logging-log4j2
|
log4j-1.2-api/src/main/java/org/apache/log4j/Layout.java
|
{
"start": 1919,
"end": 3001
}
|
class ____
* <code>null</code>.
* @return The footer.
*/
public String getFooter() {
return null;
}
/**
* If the layout handles the throwable object contained within
* {@link LoggingEvent}, then the layout should return
* {@code false}. Otherwise, if the layout ignores throwable
* object, then the layout should return {@code true}.
* If ignoresThrowable is true, the appender is responsible for
* rendering the throwable.
* <p>
* The <a href="/log4j/1.2/apidocs/org/apache/log4j/SimpleLayout.html">SimpleLayout</a>,
* <a href="/log4j/1.2/apidocs/org/apache/log4j/TTCCLayout.html">TTCCLayout</a>,
* <a href="/log4j/1.2/apidocs/org/apache/log4j/PatternLayout.html">PatternLayout</a>
* all return {@code true}. The
* <a href="/log4j/1.2/apidocs/org/apache/log4j/xml/XMLLayout.html">XMLLayout</a>
* returns {@code false}.
* </p>
*
* @return true if the Layout ignores Throwables.
*
* @since 0.8.4
*/
public abstract boolean ignoresThrowable();
}
|
returns
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java
|
{
"start": 25698,
"end": 26773
}
|
class ____<T extends RescorerBuilder<T>> extends SearchExtensionSpec<T, CheckedFunction<XContentParser, T, IOException>> {
public RescorerSpec(ParseField name, Writeable.Reader<? extends T> reader, CheckedFunction<XContentParser, T, IOException> parser) {
super(name, reader, parser);
}
public RescorerSpec(String name, Writeable.Reader<? extends T> reader, CheckedFunction<XContentParser, T, IOException> parser) {
super(name, reader, parser);
}
}
/**
* Specification of search time behavior extension like a custom {@link ScoreFunction}.
*
* @param <W> the type of the main {@link NamedWriteable} for this spec. All specs have this but it isn't always *for* the same thing
* though, usually it is some sort of builder sent from the coordinating node to the data nodes executing the behavior
* @param <P> the type of the parser for this spec. The parser runs on the coordinating node, converting {@link XContent} into the
* behavior to execute
*/
|
RescorerSpec
|
java
|
google__error-prone
|
check_api/src/test/java/com/google/errorprone/util/ASTHelpersTest.java
|
{
"start": 21148,
"end": 22215
}
|
class ____ extends RuntimeException {}
void foo() throws RuntimeException, IllegalArgumentException, MyException,
Error, VerifyError {}
}
""");
TestScanner scanner = isCheckedExceptionTypeScanner(false);
tests.add(scanner);
assertCompiles(scanner);
}
/* Tests for ASTHelpers#getUpperBound */
private TestScanner getUpperBoundScanner(String expectedBound) {
return new TestScanner() {
@Override
public Void visitVariable(VariableTree tree, VisitorState state) {
setAssertionsComplete();
Type varType = ASTHelpers.getType(tree.getType());
assertThat(
ASTHelpers.getUpperBound(varType.getTypeArguments().get(0), state.getTypes())
.toString())
.isEqualTo(expectedBound);
return super.visitVariable(tree, state);
}
};
}
@Test
public void getUpperBoundConcreteType() {
writeFile(
"A.java",
"""
import java.lang.Number;
import java.util.List;
public
|
MyException
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ReencryptionUpdater.java
|
{
"start": 3416,
"end": 4557
}
|
class ____ {
private boolean submissionDone;
private LinkedList<Future> tasks;
private int numCheckpointed;
private int numFutureDone;
ZoneSubmissionTracker() {
submissionDone = false;
tasks = new LinkedList<>();
numCheckpointed = 0;
numFutureDone = 0;
}
void reset() {
submissionDone = false;
tasks.clear();
numCheckpointed = 0;
numFutureDone = 0;
}
LinkedList<Future> getTasks() {
return tasks;
}
void cancelAllTasks() {
if (!tasks.isEmpty()) {
LOG.info("Cancelling {} re-encryption tasks", tasks.size());
for (Future f : tasks) {
f.cancel(true);
}
}
}
void addTask(final Future task) {
tasks.add(task);
}
private boolean isCompleted() {
return submissionDone && tasks.isEmpty();
}
void setSubmissionDone() {
submissionDone = true;
}
}
/**
* Class representing the task for one batch of a re-encryption command. It
* also contains statistics about how far this single batch has been executed.
*/
static final
|
ZoneSubmissionTracker
|
java
|
spring-projects__spring-security
|
core/src/main/java/org/springframework/security/core/session/SessionInformation.java
|
{
"start": 1496,
"end": 2520
}
|
class ____ implements Serializable {
private static final long serialVersionUID = 620L;
private Date lastRequest;
private final Object principal;
private final String sessionId;
private boolean expired = false;
public SessionInformation(Object principal, String sessionId, Date lastRequest) {
Assert.notNull(principal, "Principal required");
Assert.hasText(sessionId, "SessionId required");
Assert.notNull(lastRequest, "LastRequest required");
this.principal = principal;
this.sessionId = sessionId;
this.lastRequest = lastRequest;
}
public void expireNow() {
this.expired = true;
}
public Date getLastRequest() {
return this.lastRequest;
}
public Object getPrincipal() {
return this.principal;
}
public String getSessionId() {
return this.sessionId;
}
public boolean isExpired() {
return this.expired;
}
/**
* Refreshes the internal lastRequest to the current date and time.
*/
public void refreshLastRequest() {
this.lastRequest = new Date();
}
}
|
SessionInformation
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/filter/FilterWitSubSelectFetchModeTest.java
|
{
"start": 1696,
"end": 4014
}
|
class ____ {
@BeforeEach
void setUp(SessionFactoryScope scope) {
scope.inTransaction( session -> {
final Customer firstCustomer = new Customer( 1L, "First" );
final Customer secondCustomer = new Customer( 2L, "Second" );
final Customer thirdCustomer = new Customer( 3L, "Third" );
final Customer fourthCustomer = new Customer( 4L, "Fourth" );
final Customer fifthCustomer = new Customer( 5L, "Fifth" );
firstCustomer.addOrder( new CustomerOrder( 100L ) );
firstCustomer.addOrder( new CustomerOrder( 200L ) );
secondCustomer.addOrder( new CustomerOrder( 300L ) );
secondCustomer.addOrder( new CustomerOrder( 400L ) );
thirdCustomer.addOrder( new CustomerOrder( 500L ) );
thirdCustomer.addOrder( new CustomerOrder( 600L ) );
fourthCustomer.addOrder( new CustomerOrder( 700L ) );
fourthCustomer.addOrder( new CustomerOrder( 800L ) );
fifthCustomer.addOrder( new CustomerOrder( 900L ) );
fifthCustomer.addOrder( new CustomerOrder( 1000L ) );
session.persist( fifthCustomer );
session.persist( secondCustomer );
session.persist( thirdCustomer );
session.persist( fourthCustomer );
session.persist( fifthCustomer );
} );
}
@Test
void testFiltersAreApplied(SessionFactoryScope scope) {
scope.inTransaction( session -> {
session.enableFilter( "ID" ).setParameter( "id", 3L );
//noinspection removal
var result = session.createQuery( "from Customer order by id", Customer.class ).getResultList();
assertFalse( result.isEmpty() );
var customer = result.get( 0 );
assertThat( customer.getCustomerId(), is( 3L ) );
assertThat( customer.getOrders().size(), is( 2 ) );
var statistics = session.getStatistics();
assertThat( statistics.getEntityCount(), is( 9 ) );
var sfStatistics = session.getSessionFactory().getStatistics();
assertThat( sfStatistics.getCollectionFetchCount(), is( 1L ) );
assertThat( sfStatistics.getQueries().length, is(1 ) );
} );
}
@AfterEach
void tearDown(SessionFactoryScope scope) {
scope.dropData();
}
@SuppressWarnings("FieldMayBeFinal")
@Entity(name = "Customer")
@FilterDef(
name = "ID",
defaultCondition = "customerId >= :id",
parameters = {
@ParamDef(type = Long.class, name = "id")
}
)
@Filter(name = "ID")
public static
|
FilterWitSubSelectFetchModeTest
|
java
|
google__dagger
|
javatests/dagger/hilt/android/processor/internal/customtestapplication/CustomTestApplicationProcessorTest.java
|
{
"start": 10911,
"end": 11670
}
|
class ____ {}"))
.compile(
subject -> {
subject.hasErrorContaining(
"@CustomTestApplication does not support application classes (or super classes)"
+ " with @Inject constructors. Found test.BaseApplication with @Inject"
+ " constructors [BaseApplication()]");
});
}
@Test
public void withSuperclassInjectConstructor_fails() {
HiltCompilerTests.hiltCompiler(
HiltCompilerTests.javaSource(
"test.BaseApplication",
"package test;",
"",
"import android.app.Application;",
"import javax.inject.Inject;",
"",
"public
|
HiltTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/GraphInfoTransportAction.java
|
{
"start": 803,
"end": 1674
}
|
class ____ extends XPackInfoFeatureTransportAction {
private final boolean enabled;
private final XPackLicenseState licenseState;
@Inject
public GraphInfoTransportAction(
TransportService transportService,
ActionFilters actionFilters,
Settings settings,
XPackLicenseState licenseState
) {
super(XPackInfoFeatureAction.GRAPH.name(), transportService, actionFilters);
this.enabled = XPackSettings.GRAPH_ENABLED.get(settings);
this.licenseState = licenseState;
}
@Override
public String name() {
return XPackField.GRAPH;
}
@Override
public boolean available() {
return licenseState != null && Graph.GRAPH_FEATURE.checkWithoutTracking(licenseState);
}
@Override
public boolean enabled() {
return enabled;
}
}
|
GraphInfoTransportAction
|
java
|
apache__hadoop
|
hadoop-cloud-storage-project/hadoop-tos/src/main/java/org/apache/hadoop/fs/tosfs/common/Chain.java
|
{
"start": 3549,
"end": 4440
}
|
/**
 * Builder for a {@link Chain}: registers the {@link ItemFactory}s in order and
 * an optional predicate deciding whether the chain continues past an
 * {@link IOException}.
 *
 * @param <T> the closeable item type produced by the factories
 */
class Builder<T extends Closeable> {
    private final Deque<ItemFactory<T>> factories = Queues.newArrayDeque();
    private Predicate<IOException> shouldContinue;

    /** Registers {@code factory} at the head of the chain. */
    public Builder<T> addFirst(ItemFactory<T> factory) {
        factories.addFirst(factory);
        return this;
    }

    /** Registers {@code factory} at the tail of the chain. */
    public Builder<T> addLast(ItemFactory<T> factory) {
        factories.addLast(factory);
        return this;
    }

    /** Sets the predicate consulted when an item throws an {@link IOException}. */
    public Builder<T> shouldContinue(Predicate<IOException> continueCondition) {
        this.shouldContinue = continueCondition;
        return this;
    }

    /**
     * Builds the chain and eagerly initializes its first item.
     *
     * @return the constructed chain with its first item ready
     * @throws IOException if creating the first item fails
     */
    public Chain<T> build() throws IOException {
        Chain<T> chain = new Chain<>(factories, shouldContinue);
        // Do nothing in the chain task to initialize the first item.
        chain.run(item -> null);
        return chain;
    }
}
    /** Creates an empty {@link Builder} for assembling a new chain. */
    public static <T extends Closeable> Builder<T> builder() {
        return new Builder<>();
    }
}
|
Builder
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/issue_2300/Issue2343.java
|
{
"start": 713,
"end": 898
}
|
/**
 * Bean for the {@code @JSONField(ordinal)} test: serialization order is
 * driven by the ordinal values — f2 (0), f1 (1), f3 (2) — not by field
 * declaration order.
 */
class A {
    @JSONField(ordinal = 1)
    public int f1;

    @JSONField(ordinal = 0)
    public int f2;

    @JSONField(ordinal = 2)
    public int f3;
}
}
|
A
|
java
|
google__error-prone
|
check_api/src/test/java/com/google/errorprone/util/ASTHelpersTest.java
|
{
"start": 14621,
"end": 15485
}
|
class ____ extends B {}");
TestScanner scanner =
new TestScanner() {
@Override
public Void visitMethod(MethodTree tree, VisitorState state) {
if (tree.getName().contentEquals("f")) {
assertMatch(
tree,
state,
(MethodTree t, VisitorState s) ->
hasAnnotation(t, "com.google.errorprone.util.InheritedAnnotation", s));
setAssertionsComplete();
}
return super.visitMethod(tree, state);
}
};
tests.add(scanner);
assertCompiles(scanner);
}
// verify that hasAnnotation(Symbol, String, VisitorState) uses binary names for inner classes
@Test
public void innerAnnotationType() {
writeFile(
"test/Lib.java",
"""
package test;
public
|
C
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/pool/DruidDataSourceTest_interrupt3.java
|
{
"start": 248,
"end": 1774
}
|
/**
 * Verifies that a caller blocked in {@code getConnection()} on an exhausted
 * pool (maxActive == 1 with the only connection checked out) fails with an
 * exception when its thread is interrupted, instead of hanging forever.
 */
class ____ extends TestCase {
    private DruidDataSource dataSource;

    // Pool sized to a single connection so a second borrower must block.
    protected void setUp() throws Exception {
        dataSource = new DruidDataSource();
        dataSource.setUrl("jdbc:mock:xxx");
        dataSource.setTestOnBorrow(false);
        dataSource.setDefaultAutoCommit(true);
        dataSource.setMaxActive(1);
    }

    protected void tearDown() throws Exception {
        dataSource.close();
    }

    public void test_autoCommit() throws Exception {
        // Warm the pool: borrow one connection and return it.
        {
            Connection conn = dataSource.getConnection();
            conn.close();
        }
        // Hold the pool's only connection for the rest of the test.
        Connection conn = dataSource.getConnection();
        final CountDownLatch startLatch = new CountDownLatch(1);
        final CountDownLatch endLatch = new CountDownLatch(1);
        final AtomicInteger errorCount = new AtomicInteger();
        // Background borrower: expected to block, then fail once interrupted.
        Thread thread = new Thread() {
            public void run() {
                try {
                    startLatch.countDown();
                    dataSource.getConnection();
                } catch (Exception e) {
                    errorCount.incrementAndGet();
                } finally {
                    endLatch.countDown();
                }
            }
        };
        thread.setDaemon(true);
        thread.start();
        startLatch.await();
        // Short pause so the borrower reaches the blocking wait before the
        // interrupt — NOTE(review): timing-based; could be flaky when slow.
        Thread.sleep(10);
        assertEquals(0, errorCount.get());
        thread.interrupt();
        endLatch.await();
        // The interrupted borrower must have failed exactly once.
        assertEquals(1, errorCount.get());
        conn.close();
    }
}
|
DruidDataSourceTest_interrupt3
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/appender/HttpManager.java
|
{
"start": 1098,
"end": 1672
}
|
class ____ extends AbstractManager {
private final Configuration configuration;
protected HttpManager(final Configuration configuration, final LoggerContext loggerContext, final String name) {
super(loggerContext, name);
this.configuration = Objects.requireNonNull(configuration);
}
public Configuration getConfiguration() {
return configuration;
}
public void startup() {
// This default implementation does nothing
}
public abstract void send(Layout<?> layout, LogEvent event) throws Exception;
}
|
HttpManager
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/common/requests/VoteResponse.java
|
{
"start": 1447,
"end": 2606
}
|
class ____ extends AbstractResponse {
private final VoteResponseData data;
public VoteResponse(VoteResponseData data) {
super(ApiKeys.VOTE);
this.data = data;
}
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> errors = new EnumMap<>(Errors.class);
errors.put(Errors.forCode(data.errorCode()), 1);
for (VoteResponseData.TopicData topicResponse : data.topics()) {
for (VoteResponseData.PartitionData partitionResponse : topicResponse.partitions()) {
updateErrorCounts(errors, Errors.forCode(partitionResponse.errorCode()));
}
}
return errors;
}
@Override
public VoteResponseData data() {
return data;
}
@Override
public int throttleTimeMs() {
return DEFAULT_THROTTLE_TIME;
}
@Override
public void maybeSetThrottleTimeMs(int throttleTimeMs) {
// Not supported by the response schema
}
public static VoteResponse parse(Readable readable, short version) {
return new VoteResponse(new VoteResponseData(readable, version));
}
}
|
VoteResponse
|
java
|
apache__camel
|
components/camel-google/camel-google-pubsub-lite/src/main/java/org/apache/camel/component/google/pubsublite/serializer/GooglePubsubSerializer.java
|
{
"start": 1027,
"end": 1391
}
|
/**
 * Strategy for converting an outgoing message payload into the byte array
 * sent to Google Pub/Sub Lite.
 *
 * <p>Single abstract method, so implementations may be supplied as lambdas.
 */
@FunctionalInterface
interface GooglePubsubSerializer {

    /**
     * Serializes an object payload to a byte array.
     *
     * @param payload The payload to serialize
     * @return The serialized payload as a byte array
     * @throws IOException If the serialization process encountered errors
     */
    byte[] serialize(Object payload) throws IOException;
}
|
GooglePubsubSerializer
|
java
|
quarkusio__quarkus
|
core/deployment/src/main/java/io/quarkus/deployment/builditem/LogHandlerBuildItem.java
|
{
"start": 314,
"end": 930
}
|
class ____ extends MultiBuildItem {
private final RuntimeValue<Optional<Handler>> handlerValue;
/**
* Construct a new instance.
*
* @param handlerValue the handler value to add to the run time configuration
*/
public LogHandlerBuildItem(final RuntimeValue<Optional<Handler>> handlerValue) {
this.handlerValue = Assert.checkNotNullParam("handlerValue", handlerValue);
}
/**
* Get the handler value.
*
* @return the handler value
*/
public RuntimeValue<Optional<Handler>> getHandlerValue() {
return handlerValue;
}
}
|
LogHandlerBuildItem
|
java
|
redisson__redisson
|
redisson/src/test/java/org/redisson/codec/protobuf/nativeData/Proto2AllTypes.java
|
{
"start": 30424,
"end": 67579
}
|
class ____ extends
com.google.protobuf.GeneratedMessage.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:AllTypes2)
Proto2AllTypes.AllTypes2OrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return Proto2AllTypes.internal_static_org_redisson_codec_protobuf_raw_AllTypes2_descriptor;
}
@Override
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return Proto2AllTypes.internal_static_org_redisson_codec_protobuf_raw_AllTypes2_fieldAccessorTable
.ensureFieldAccessorsInitialized(
Proto2AllTypes.AllTypes2.class, Proto2AllTypes.AllTypes2.Builder.class);
}
// Construct using Proto2AllTypes.AllTypes2.newBuilder()
private Builder() {
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
}
@Override
public Builder clear() {
super.clear();
bitField0_ = 0;
doubleType_ = emptyDoubleList();
floatType_ = 0F;
int32Type_ = 0;
int64Type_ = 0L;
uint32Type_ = 0;
uint64Type_ = 0L;
sint32Type_ = 0;
sint64Type_ = 0L;
fixed32Type_ = 0;
fixed64Type_ = 0L;
sfixed32Type_ = 0;
sfixed64Type_ = 0L;
boolType_ = false;
stringType_ = "";
bytesType_ = com.google.protobuf.ByteString.EMPTY;
return this;
}
@Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return Proto2AllTypes.internal_static_org_redisson_codec_protobuf_raw_AllTypes2_descriptor;
}
@Override
public Proto2AllTypes.AllTypes2 getDefaultInstanceForType() {
return Proto2AllTypes.AllTypes2.getDefaultInstance();
}
@Override
public Proto2AllTypes.AllTypes2 build() {
Proto2AllTypes.AllTypes2 result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@Override
public Proto2AllTypes.AllTypes2 buildPartial() {
Proto2AllTypes.AllTypes2 result = new Proto2AllTypes.AllTypes2(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(Proto2AllTypes.AllTypes2 result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
doubleType_.makeImmutable();
result.doubleType_ = doubleType_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.floatType_ = floatType_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.int32Type_ = int32Type_;
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.int64Type_ = int64Type_;
to_bitField0_ |= 0x00000004;
}
if (((from_bitField0_ & 0x00000010) != 0)) {
result.uint32Type_ = uint32Type_;
to_bitField0_ |= 0x00000008;
}
if (((from_bitField0_ & 0x00000020) != 0)) {
result.uint64Type_ = uint64Type_;
to_bitField0_ |= 0x00000010;
}
if (((from_bitField0_ & 0x00000040) != 0)) {
result.sint32Type_ = sint32Type_;
to_bitField0_ |= 0x00000020;
}
if (((from_bitField0_ & 0x00000080) != 0)) {
result.sint64Type_ = sint64Type_;
to_bitField0_ |= 0x00000040;
}
if (((from_bitField0_ & 0x00000100) != 0)) {
result.fixed32Type_ = fixed32Type_;
to_bitField0_ |= 0x00000080;
}
if (((from_bitField0_ & 0x00000200) != 0)) {
result.fixed64Type_ = fixed64Type_;
to_bitField0_ |= 0x00000100;
}
if (((from_bitField0_ & 0x00000400) != 0)) {
result.sfixed32Type_ = sfixed32Type_;
to_bitField0_ |= 0x00000200;
}
if (((from_bitField0_ & 0x00000800) != 0)) {
result.sfixed64Type_ = sfixed64Type_;
to_bitField0_ |= 0x00000400;
}
if (((from_bitField0_ & 0x00001000) != 0)) {
result.boolType_ = boolType_;
to_bitField0_ |= 0x00000800;
}
if (((from_bitField0_ & 0x00002000) != 0)) {
result.stringType_ = stringType_;
to_bitField0_ |= 0x00001000;
}
if (((from_bitField0_ & 0x00004000) != 0)) {
result.bytesType_ = bytesType_;
to_bitField0_ |= 0x00002000;
}
result.bitField0_ |= to_bitField0_;
}
@Override
public Builder clone() {
return super.clone();
}
@Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return super.setField(field, value);
}
@Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, Object value) {
return super.setRepeatedField(field, index, value);
}
@Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return super.addRepeatedField(field, value);
}
@Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof Proto2AllTypes.AllTypes2) {
return mergeFrom((Proto2AllTypes.AllTypes2)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(Proto2AllTypes.AllTypes2 other) {
if (other == Proto2AllTypes.AllTypes2.getDefaultInstance()) return this;
if (!other.doubleType_.isEmpty()) {
if (doubleType_.isEmpty()) {
doubleType_ = other.doubleType_;
doubleType_.makeImmutable();
bitField0_ |= 0x00000001;
} else {
ensureDoubleTypeIsMutable();
doubleType_.addAll(other.doubleType_);
}
onChanged();
}
if (other.hasFloatType()) {
setFloatType(other.getFloatType());
}
if (other.hasInt32Type()) {
setInt32Type(other.getInt32Type());
}
if (other.hasInt64Type()) {
setInt64Type(other.getInt64Type());
}
if (other.hasUint32Type()) {
setUint32Type(other.getUint32Type());
}
if (other.hasUint64Type()) {
setUint64Type(other.getUint64Type());
}
if (other.hasSint32Type()) {
setSint32Type(other.getSint32Type());
}
if (other.hasSint64Type()) {
setSint64Type(other.getSint64Type());
}
if (other.hasFixed32Type()) {
setFixed32Type(other.getFixed32Type());
}
if (other.hasFixed64Type()) {
setFixed64Type(other.getFixed64Type());
}
if (other.hasSfixed32Type()) {
setSfixed32Type(other.getSfixed32Type());
}
if (other.hasSfixed64Type()) {
setSfixed64Type(other.getSfixed64Type());
}
if (other.hasBoolType()) {
setBoolType(other.getBoolType());
}
if (other.hasStringType()) {
stringType_ = other.stringType_;
bitField0_ |= 0x00002000;
onChanged();
}
if (other.hasBytesType()) {
setBytesType(other.getBytesType());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@Override
public final boolean isInitialized() {
if (!hasInt32Type()) {
return false;
}
return true;
}
@Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 9: {
double v = input.readDouble();
ensureDoubleTypeIsMutable();
doubleType_.addDouble(v);
break;
} // case 9
case 10: {
int length = input.readRawVarint32();
int limit = input.pushLimit(length);
int alloc = length > 4096 ? 4096 : length;
ensureDoubleTypeIsMutable(alloc / 8);
while (input.getBytesUntilLimit() > 0) {
doubleType_.addDouble(input.readDouble());
}
input.popLimit(limit);
break;
} // case 10
case 21: {
floatType_ = input.readFloat();
bitField0_ |= 0x00000002;
break;
} // case 21
case 24: {
int32Type_ = input.readInt32();
bitField0_ |= 0x00000004;
break;
} // case 24
case 32: {
int64Type_ = input.readInt64();
bitField0_ |= 0x00000008;
break;
} // case 32
case 40: {
uint32Type_ = input.readUInt32();
bitField0_ |= 0x00000010;
break;
} // case 40
case 48: {
uint64Type_ = input.readUInt64();
bitField0_ |= 0x00000020;
break;
} // case 48
case 56: {
sint32Type_ = input.readSInt32();
bitField0_ |= 0x00000040;
break;
} // case 56
case 64: {
sint64Type_ = input.readSInt64();
bitField0_ |= 0x00000080;
break;
} // case 64
case 77: {
fixed32Type_ = input.readFixed32();
bitField0_ |= 0x00000100;
break;
} // case 77
case 81: {
fixed64Type_ = input.readFixed64();
bitField0_ |= 0x00000200;
break;
} // case 81
case 93: {
sfixed32Type_ = input.readSFixed32();
bitField0_ |= 0x00000400;
break;
} // case 93
case 97: {
sfixed64Type_ = input.readSFixed64();
bitField0_ |= 0x00000800;
break;
} // case 97
case 104: {
boolType_ = input.readBool();
bitField0_ |= 0x00001000;
break;
} // case 104
case 114: {
stringType_ = input.readBytes();
bitField0_ |= 0x00002000;
break;
} // case 114
case 122: {
bytesType_ = input.readBytes();
bitField0_ |= 0x00004000;
break;
} // case 122
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.protobuf.Internal.DoubleList doubleType_ = emptyDoubleList();
private void ensureDoubleTypeIsMutable() {
if (!doubleType_.isModifiable()) {
doubleType_ = makeMutableCopy(doubleType_);
}
bitField0_ |= 0x00000001;
}
private void ensureDoubleTypeIsMutable(int capacity) {
if (!doubleType_.isModifiable()) {
doubleType_ = makeMutableCopy(doubleType_, capacity);
}
bitField0_ |= 0x00000001;
}
/**
* <pre>
*types from https://protobuf.dev/programming-guides/proto2/
* </pre>
*
* <code>repeated double doubleType = 1;</code>
* @return A list containing the doubleType.
*/
public java.util.List<Double>
getDoubleTypeList() {
doubleType_.makeImmutable();
return doubleType_;
}
/**
* <pre>
*types from https://protobuf.dev/programming-guides/proto2/
* </pre>
*
* <code>repeated double doubleType = 1;</code>
* @return The count of doubleType.
*/
public int getDoubleTypeCount() {
return doubleType_.size();
}
/**
* <pre>
*types from https://protobuf.dev/programming-guides/proto2/
* </pre>
*
* <code>repeated double doubleType = 1;</code>
* @param index The index of the element to return.
* @return The doubleType at the given index.
*/
public double getDoubleType(int index) {
return doubleType_.getDouble(index);
}
/**
* <pre>
*types from https://protobuf.dev/programming-guides/proto2/
* </pre>
*
* <code>repeated double doubleType = 1;</code>
* @param index The index to set the value at.
* @param value The doubleType to set.
* @return This builder for chaining.
*/
public Builder setDoubleType(
int index, double value) {
ensureDoubleTypeIsMutable();
doubleType_.setDouble(index, value);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <pre>
*types from https://protobuf.dev/programming-guides/proto2/
* </pre>
*
* <code>repeated double doubleType = 1;</code>
* @param value The doubleType to add.
* @return This builder for chaining.
*/
public Builder addDoubleType(double value) {
ensureDoubleTypeIsMutable();
doubleType_.addDouble(value);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <pre>
*types from https://protobuf.dev/programming-guides/proto2/
* </pre>
*
* <code>repeated double doubleType = 1;</code>
* @param values The doubleType to add.
* @return This builder for chaining.
*/
public Builder addAllDoubleType(
Iterable<? extends Double> values) {
ensureDoubleTypeIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, doubleType_);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <pre>
*types from https://protobuf.dev/programming-guides/proto2/
* </pre>
*
* <code>repeated double doubleType = 1;</code>
* @return This builder for chaining.
*/
public Builder clearDoubleType() {
doubleType_ = emptyDoubleList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
private float floatType_ ;
/**
* <code>optional float floatType = 2;</code>
* @return Whether the floatType field is set.
*/
@Override
public boolean hasFloatType() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* <code>optional float floatType = 2;</code>
* @return The floatType.
*/
@Override
public float getFloatType() {
return floatType_;
}
/**
* <code>optional float floatType = 2;</code>
* @param value The floatType to set.
* @return This builder for chaining.
*/
public Builder setFloatType(float value) {
floatType_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <code>optional float floatType = 2;</code>
* @return This builder for chaining.
*/
public Builder clearFloatType() {
bitField0_ = (bitField0_ & ~0x00000002);
floatType_ = 0F;
onChanged();
return this;
}
private int int32Type_ ;
/**
* <code>required int32 int32Type = 3;</code>
* @return Whether the int32Type field is set.
*/
@Override
public boolean hasInt32Type() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* <code>required int32 int32Type = 3;</code>
* @return The int32Type.
*/
@Override
public int getInt32Type() {
return int32Type_;
}
/**
* <code>required int32 int32Type = 3;</code>
* @param value The int32Type to set.
* @return This builder for chaining.
*/
public Builder setInt32Type(int value) {
int32Type_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <code>required int32 int32Type = 3;</code>
* @return This builder for chaining.
*/
public Builder clearInt32Type() {
bitField0_ = (bitField0_ & ~0x00000004);
int32Type_ = 0;
onChanged();
return this;
}
private long int64Type_ ;
/**
* <code>optional int64 int64Type = 4;</code>
* @return Whether the int64Type field is set.
*/
@Override
public boolean hasInt64Type() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* <code>optional int64 int64Type = 4;</code>
* @return The int64Type.
*/
@Override
public long getInt64Type() {
return int64Type_;
}
/**
* <code>optional int64 int64Type = 4;</code>
* @param value The int64Type to set.
* @return This builder for chaining.
*/
public Builder setInt64Type(long value) {
int64Type_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
* <code>optional int64 int64Type = 4;</code>
* @return This builder for chaining.
*/
public Builder clearInt64Type() {
bitField0_ = (bitField0_ & ~0x00000008);
int64Type_ = 0L;
onChanged();
return this;
}
private int uint32Type_ ;
/**
* <code>optional uint32 uint32Type = 5;</code>
* @return Whether the uint32Type field is set.
*/
@Override
public boolean hasUint32Type() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
* <code>optional uint32 uint32Type = 5;</code>
* @return The uint32Type.
*/
@Override
public int getUint32Type() {
return uint32Type_;
}
/**
* <code>optional uint32 uint32Type = 5;</code>
* @param value The uint32Type to set.
* @return This builder for chaining.
*/
public Builder setUint32Type(int value) {
uint32Type_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
* <code>optional uint32 uint32Type = 5;</code>
* @return This builder for chaining.
*/
public Builder clearUint32Type() {
bitField0_ = (bitField0_ & ~0x00000010);
uint32Type_ = 0;
onChanged();
return this;
}
private long uint64Type_ ;
/**
* <code>optional uint64 uint64Type = 6;</code>
* @return Whether the uint64Type field is set.
*/
@Override
public boolean hasUint64Type() {
return ((bitField0_ & 0x00000020) != 0);
}
/**
* <code>optional uint64 uint64Type = 6;</code>
* @return The uint64Type.
*/
@Override
public long getUint64Type() {
return uint64Type_;
}
/**
* <code>optional uint64 uint64Type = 6;</code>
* @param value The uint64Type to set.
* @return This builder for chaining.
*/
public Builder setUint64Type(long value) {
uint64Type_ = value;
bitField0_ |= 0x00000020;
onChanged();
return this;
}
/**
* <code>optional uint64 uint64Type = 6;</code>
* @return This builder for chaining.
*/
public Builder clearUint64Type() {
bitField0_ = (bitField0_ & ~0x00000020);
uint64Type_ = 0L;
onChanged();
return this;
}
// Field 7 (sint32Type); presence tracked by bit 0x40 of bitField0_.
private int sint32Type_ ;
/**
* <code>optional sint32 sint32Type = 7;</code>
* @return Whether the sint32Type field is set.
*/
@Override
public boolean hasSint32Type() {
return ((bitField0_ & 0x00000040) != 0);
}
/**
* <code>optional sint32 sint32Type = 7;</code>
* @return The sint32Type.
*/
@Override
public int getSint32Type() {
return sint32Type_;
}
/**
* <code>optional sint32 sint32Type = 7;</code>
* @param value The sint32Type to set.
* @return This builder for chaining.
*/
public Builder setSint32Type(int value) {
sint32Type_ = value;
bitField0_ |= 0x00000040;
onChanged();
return this;
}
/**
* <code>optional sint32 sint32Type = 7;</code>
* @return This builder for chaining.
*/
public Builder clearSint32Type() {
bitField0_ = (bitField0_ & ~0x00000040);
sint32Type_ = 0;
onChanged();
return this;
}
// Field 8 (sint64Type); presence tracked by bit 0x80 of bitField0_.
private long sint64Type_ ;
/**
* <code>optional sint64 sint64Type = 8;</code>
* @return Whether the sint64Type field is set.
*/
@Override
public boolean hasSint64Type() {
return ((bitField0_ & 0x00000080) != 0);
}
/**
* <code>optional sint64 sint64Type = 8;</code>
* @return The sint64Type.
*/
@Override
public long getSint64Type() {
return sint64Type_;
}
/**
* <code>optional sint64 sint64Type = 8;</code>
* @param value The sint64Type to set.
* @return This builder for chaining.
*/
public Builder setSint64Type(long value) {
sint64Type_ = value;
bitField0_ |= 0x00000080;
onChanged();
return this;
}
/**
* <code>optional sint64 sint64Type = 8;</code>
* @return This builder for chaining.
*/
public Builder clearSint64Type() {
bitField0_ = (bitField0_ & ~0x00000080);
sint64Type_ = 0L;
onChanged();
return this;
}
// Field 9 (fixed32Type); presence tracked by bit 0x100 of bitField0_.
private int fixed32Type_ ;
/**
* <code>optional fixed32 fixed32Type = 9;</code>
* @return Whether the fixed32Type field is set.
*/
@Override
public boolean hasFixed32Type() {
return ((bitField0_ & 0x00000100) != 0);
}
/**
* <code>optional fixed32 fixed32Type = 9;</code>
* @return The fixed32Type.
*/
@Override
public int getFixed32Type() {
return fixed32Type_;
}
/**
* <code>optional fixed32 fixed32Type = 9;</code>
* @param value The fixed32Type to set.
* @return This builder for chaining.
*/
public Builder setFixed32Type(int value) {
fixed32Type_ = value;
bitField0_ |= 0x00000100;
onChanged();
return this;
}
/**
* <code>optional fixed32 fixed32Type = 9;</code>
* @return This builder for chaining.
*/
public Builder clearFixed32Type() {
bitField0_ = (bitField0_ & ~0x00000100);
fixed32Type_ = 0;
onChanged();
return this;
}
// Field 10 (fixed64Type); presence tracked by bit 0x200 of bitField0_.
private long fixed64Type_ ;
/**
* <code>optional fixed64 fixed64Type = 10;</code>
* @return Whether the fixed64Type field is set.
*/
@Override
public boolean hasFixed64Type() {
return ((bitField0_ & 0x00000200) != 0);
}
/**
* <code>optional fixed64 fixed64Type = 10;</code>
* @return The fixed64Type.
*/
@Override
public long getFixed64Type() {
return fixed64Type_;
}
/**
* <code>optional fixed64 fixed64Type = 10;</code>
* @param value The fixed64Type to set.
* @return This builder for chaining.
*/
public Builder setFixed64Type(long value) {
fixed64Type_ = value;
bitField0_ |= 0x00000200;
onChanged();
return this;
}
/**
* <code>optional fixed64 fixed64Type = 10;</code>
* @return This builder for chaining.
*/
public Builder clearFixed64Type() {
bitField0_ = (bitField0_ & ~0x00000200);
fixed64Type_ = 0L;
onChanged();
return this;
}
// Field 11 (sfixed32Type); presence tracked by bit 0x400 of bitField0_.
private int sfixed32Type_ ;
/**
* <code>optional sfixed32 sfixed32Type = 11;</code>
* @return Whether the sfixed32Type field is set.
*/
@Override
public boolean hasSfixed32Type() {
return ((bitField0_ & 0x00000400) != 0);
}
/**
* <code>optional sfixed32 sfixed32Type = 11;</code>
* @return The sfixed32Type.
*/
@Override
public int getSfixed32Type() {
return sfixed32Type_;
}
/**
* <code>optional sfixed32 sfixed32Type = 11;</code>
* @param value The sfixed32Type to set.
* @return This builder for chaining.
*/
public Builder setSfixed32Type(int value) {
sfixed32Type_ = value;
bitField0_ |= 0x00000400;
onChanged();
return this;
}
/**
* <code>optional sfixed32 sfixed32Type = 11;</code>
* @return This builder for chaining.
*/
public Builder clearSfixed32Type() {
bitField0_ = (bitField0_ & ~0x00000400);
sfixed32Type_ = 0;
onChanged();
return this;
}
// Field 12 (sfixed64Type); presence tracked by bit 0x800 of bitField0_.
private long sfixed64Type_ ;
/**
* <code>optional sfixed64 sfixed64Type = 12;</code>
* @return Whether the sfixed64Type field is set.
*/
@Override
public boolean hasSfixed64Type() {
return ((bitField0_ & 0x00000800) != 0);
}
/**
* <code>optional sfixed64 sfixed64Type = 12;</code>
* @return The sfixed64Type.
*/
@Override
public long getSfixed64Type() {
return sfixed64Type_;
}
/**
* <code>optional sfixed64 sfixed64Type = 12;</code>
* @param value The sfixed64Type to set.
* @return This builder for chaining.
*/
public Builder setSfixed64Type(long value) {
sfixed64Type_ = value;
bitField0_ |= 0x00000800;
onChanged();
return this;
}
/**
* <code>optional sfixed64 sfixed64Type = 12;</code>
* @return This builder for chaining.
*/
public Builder clearSfixed64Type() {
bitField0_ = (bitField0_ & ~0x00000800);
sfixed64Type_ = 0L;
onChanged();
return this;
}
// Field 13 (boolType); presence tracked by bit 0x1000 of bitField0_.
private boolean boolType_ ;
/**
* <code>optional bool boolType = 13;</code>
* @return Whether the boolType field is set.
*/
@Override
public boolean hasBoolType() {
return ((bitField0_ & 0x00001000) != 0);
}
/**
* <code>optional bool boolType = 13;</code>
* @return The boolType.
*/
@Override
public boolean getBoolType() {
return boolType_;
}
/**
* <code>optional bool boolType = 13;</code>
* @param value The boolType to set.
* @return This builder for chaining.
*/
public Builder setBoolType(boolean value) {
boolType_ = value;
bitField0_ |= 0x00001000;
onChanged();
return this;
}
/**
* <code>optional bool boolType = 13;</code>
* @return This builder for chaining.
*/
public Builder clearBoolType() {
bitField0_ = (bitField0_ & ~0x00001000);
boolType_ = false;
onChanged();
return this;
}
// Field 14 (stringType); presence tracked by bit 0x2000 of bitField0_.
// Holds either a String or a ByteString: the wire form is kept until first
// read, then lazily converted (and cached when the bytes are valid UTF-8).
private Object stringType_ = "";
/**
* <code>optional string stringType = 14;</code>
* @return Whether the stringType field is set.
*/
public boolean hasStringType() {
return ((bitField0_ & 0x00002000) != 0);
}
/**
* <code>optional string stringType = 14;</code>
* @return The stringType.
*/
public String getStringType() {
Object ref = stringType_;
if (!(ref instanceof String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
stringType_ = s; // cache the decoded String only when the UTF-8 was valid
}
return s;
} else {
return (String) ref;
}
}
/**
* <code>optional string stringType = 14;</code>
* @return The bytes for stringType.
*/
public com.google.protobuf.ByteString
getStringTypeBytes() {
Object ref = stringType_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(String) ref);
stringType_ = b; // cache the encoded form for subsequent byte reads
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>optional string stringType = 14;</code>
* @param value The stringType to set.
* @return This builder for chaining.
*/
public Builder setStringType(
String value) {
if (value == null) { throw new NullPointerException(); }
stringType_ = value;
bitField0_ |= 0x00002000;
onChanged();
return this;
}
/**
* <code>optional string stringType = 14;</code>
* @return This builder for chaining.
*/
public Builder clearStringType() {
stringType_ = getDefaultInstance().getStringType();
bitField0_ = (bitField0_ & ~0x00002000);
onChanged();
return this;
}
/**
* <code>optional string stringType = 14;</code>
* @param value The bytes for stringType to set.
* @return This builder for chaining.
*/
public Builder setStringTypeBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
stringType_ = value;
bitField0_ |= 0x00002000;
onChanged();
return this;
}
// Field 15 (bytesType); presence tracked by bit 0x4000 of bitField0_.
private com.google.protobuf.ByteString bytesType_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>optional bytes bytesType = 15;</code>
* @return Whether the bytesType field is set.
*/
@Override
public boolean hasBytesType() {
return ((bitField0_ & 0x00004000) != 0);
}
/**
* <code>optional bytes bytesType = 15;</code>
* @return The bytesType.
*/
@Override
public com.google.protobuf.ByteString getBytesType() {
return bytesType_;
}
/**
* <code>optional bytes bytesType = 15;</code>
* @param value The bytesType to set.
* @return This builder for chaining.
*/
public Builder setBytesType(com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
bytesType_ = value;
bitField0_ |= 0x00004000;
onChanged();
return this;
}
/**
* <code>optional bytes bytesType = 15;</code>
* @return This builder for chaining.
*/
public Builder clearBytesType() {
bitField0_ = (bitField0_ & ~0x00004000);
bytesType_ = getDefaultInstance().getBytesType();
onChanged();
return this;
}
// Generated overrides that simply delegate unknown-field handling to the
// GeneratedMessage base builder (kept final by the generator).
@Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:AllTypes2)
}
// @@protoc_insertion_point(class_scope:AllTypes2)
// Singleton default instance: the canonical "all fields unset" message.
private static final Proto2AllTypes.AllTypes2 DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new Proto2AllTypes.AllTypes2();
}
public static Proto2AllTypes.AllTypes2 getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Deprecated public PARSER field retained by the generator for backwards
// compatibility; prefer parser(). Parses via a fresh Builder, attaching the
// partially built message to any parse exception so callers can inspect it.
@Deprecated public static final com.google.protobuf.Parser<AllTypes2>
PARSER = new com.google.protobuf.AbstractParser<AllTypes2>() {
@Override
public AllTypes2 parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Wrap plain I/O failures so callers see a single protobuf exception type.
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<AllTypes2> parser() {
return PARSER;
}
@Override
public com.google.protobuf.Parser<AllTypes2> getParserForType() {
return PARSER;
}
@Override
public Proto2AllTypes.AllTypes2 getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// Reflection support: descriptor handles for the AllTypes2 message type.
private static final com.google.protobuf.Descriptors.Descriptor
internal_static_org_redisson_codec_protobuf_raw_AllTypes2_descriptor;
private static final
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_redisson_codec_protobuf_raw_AllTypes2_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
// Serialized form of proto2AllTypes.proto (escaped bytes, split across
// string literals by the generator). Do not edit by hand.
String[] descriptorData = {
"\n\024proto2AllTypes.proto\022\037org.redisson.cod" +
"ec.protobuf.raw\"\267\002\n\tAllTypes2\022\022\n\ndoubleT" +
"ype\030\001 \003(\001\022\021\n\tfloatType\030\002 \001(\002\022\021\n\tint32Typ" +
"e\030\003 \002(\005\022\021\n\tint64Type\030\004 \001(\003\022\022\n\nuint32Type" +
"\030\005 \001(\r\022\022\n\nuint64Type\030\006 \001(\004\022\022\n\nsint32Type" +
"\030\007 \001(\021\022\022\n\nsint64Type\030\010 \001(\022\022\023\n\013fixed32Typ" +
"e\030\t \001(\007\022\023\n\013fixed64Type\030\n \001(\006\022\024\n\014sfixed32" +
"Type\030\013 \001(\017\022\024\n\014sfixed64Type\030\014 \001(\020\022\020\n\010bool" +
"Type\030\r \001(\010\022\022\n\nstringType\030\016 \001(\t\022\021\n\tbytesT" +
"ype\030\017 \001(\014B1\n\037org.redisson.codec.protobuf" +
".rawB\016Proto2AllTypes"
};
descriptor = com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
}); // no file-level dependencies
internal_static_org_redisson_codec_protobuf_raw_AllTypes2_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_org_redisson_codec_protobuf_raw_AllTypes2_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_redisson_codec_protobuf_raw_AllTypes2_descriptor,
new String[] { "DoubleType", "FloatType", "Int32Type", "Int64Type", "Uint32Type", "Uint64Type", "Sint32Type", "Sint64Type", "Fixed32Type", "Fixed64Type", "Sfixed32Type", "Sfixed64Type", "BoolType", "StringType", "BytesType", });
}
// @@protoc_insertion_point(outer_class_scope)
}
|
Builder
|
java
|
apache__kafka
|
streams/src/test/java/org/apache/kafka/streams/state/internals/ThreadCacheTest.java
|
{
"start": 1856,
"end": 29333
}
|
class ____ {
// Three namespaces that deliberately share the "0.x-namespace" prefix so
// namespace-isolation tests also exercise prefix handling.
final String namespace = "0.0-namespace";
final String namespace1 = "0.1-namespace";
final String namespace2 = "0.2-namespace";
private final LogContext logContext = new LogContext("testCache ");
// Eleven single-byte keys {0}..{10} used by the range/iteration tests.
private final byte[][] bytes = new byte[][]{{0}, {1}, {2}, {3}, {4}, {5}, {6}, {7}, {8}, {9}, {10}};
// Fixed raw key/value passed into LRUCacheEntry fixtures.
private final byte[] rawKey = new byte[]{0};
private final byte[] rawValue = new byte[]{0};
@Test
public void basicPutGet() {
// Puts five entries into a cache sized to hold exactly five, reads them back,
// and verifies the hit/put counters with zero evictions or flushes.
final List<KeyValue<String, String>> toInsert = Arrays.asList(
new KeyValue<>("K1", "V1"),
new KeyValue<>("K2", "V2"),
new KeyValue<>("K3", "V3"),
new KeyValue<>("K4", "V4"),
new KeyValue<>("K5", "V5"));
final KeyValue<String, String> kv = toInsert.get(0);
// Capacity = 5 * per-entry cost; all keys/values have equal length so no eviction.
// NOTE(review): getBytes()/new String() use the platform default charset — fine
// for these ASCII fixtures, but confirm if non-ASCII keys are ever added.
final ThreadCache cache = new ThreadCache(logContext,
toInsert.size() * memoryCacheEntrySize(kv.key.getBytes(), kv.value.getBytes(), ""),
new MockStreamsMetrics(new Metrics()));
for (final KeyValue<String, String> kvToInsert : toInsert) {
final Bytes key = Bytes.wrap(kvToInsert.key.getBytes());
final byte[] value = kvToInsert.value.getBytes();
cache.put(namespace, key, new LRUCacheEntry(value, new RecordHeaders(), true, 1L, 1L, 1, "", rawKey, rawValue));
}
for (final KeyValue<String, String> kvToInsert : toInsert) {
final Bytes key = Bytes.wrap(kvToInsert.key.getBytes());
final LRUCacheEntry entry = cache.get(namespace, key);
assertTrue(entry.isDirty());
assertEquals(new String(entry.value()), kvToInsert.value);
}
assertEquals(5, cache.gets());
assertEquals(5, cache.puts());
assertEquals(0, cache.evicts());
assertEquals(0, cache.flushes());
}
/**
* Fills a cache up to {@code desiredCacheSize} and checks that (a) the cache's
* own size accounting stays within {@code entryFactor} of the target and (b)
* actual JVM heap growth stays within {@code systemFactor} of the accounted size.
* NOTE(review): relies on System.gc() and free-memory snapshots, so the heap
* bound is inherently approximate and can be flaky under memory pressure.
*/
private void checkOverheads(final double entryFactor,
final double systemFactor,
final long desiredCacheSize,
final int keySizeBytes,
final int valueSizeBytes) {
final Runtime runtime = Runtime.getRuntime();
// Number of entries that should exactly fill the requested capacity.
final long numElements = desiredCacheSize / memoryCacheEntrySize(new byte[keySizeBytes], new byte[valueSizeBytes], "");
System.gc();
final long prevRuntimeMemory = runtime.totalMemory() - runtime.freeMemory();
final ThreadCache cache = new ThreadCache(logContext, desiredCacheSize, new MockStreamsMetrics(new Metrics()));
final long size = cache.sizeBytes();
assertEquals(0, size);
for (int i = 0; i < numElements; i++) {
final String keyStr = "K" + i;
final Bytes key = Bytes.wrap(keyStr.getBytes());
final byte[] value = new byte[valueSizeBytes];
cache.put(namespace, key, new LRUCacheEntry(value, new RecordHeaders(), true, 1L, 1L, 1, "", rawKey, rawValue));
}
System.gc();
final double ceiling = desiredCacheSize + desiredCacheSize * entryFactor;
final long usedRuntimeMemory = runtime.totalMemory() - runtime.freeMemory() - prevRuntimeMemory;
assertTrue((double) cache.sizeBytes() <= ceiling);
assertTrue(cache.sizeBytes() * systemFactor >= usedRuntimeMemory,
"Used memory size " + usedRuntimeMemory + " greater than expected " + cache.sizeBytes() * systemFactor);
}
@Test
public void cacheOverheadsSmallValues() {
// Small values (100 B) carry relatively more per-entry overhead, so a looser
// 3x heap bound is accepted.
final Runtime runtime = Runtime.getRuntime();
final double factor = 0.05;
final double systemFactor = 3; // if I ask for a cache size of 10 MB, accept an overhead of 3x, i.e., 30 MBs might be allocated
final long desiredCacheSize = Math.min(100 * 1024 * 1024L, runtime.maxMemory());
final int keySizeBytes = 8;
final int valueSizeBytes = 100;
checkOverheads(factor, systemFactor, desiredCacheSize, keySizeBytes, valueSizeBytes);
}
@Test
public void cacheOverheadsLargeValues() {
// Larger values (1000 B) amortize the per-entry overhead, so a tighter 2x
// heap bound applies.
final Runtime runtime = Runtime.getRuntime();
final double factor = 0.05;
final double systemFactor = 2; // if I ask for a cache size of 10 MB, accept an overhead of 2x, i.e., 20 MBs might be allocated
final long desiredCacheSize = Math.min(100 * 1024 * 1024L, runtime.maxMemory());
final int keySizeBytes = 8;
final int valueSizeBytes = 1000;
checkOverheads(factor, systemFactor, desiredCacheSize, keySizeBytes, valueSizeBytes);
}
/**
* Mirrors ThreadCache's per-entry memory accounting: the cache-entry payload
* plus record-context fields plus the LRU node bookkeeping for one entry.
*/
static long memoryCacheEntrySize(final byte[] key, final byte[] value, final String topic) {
return key.length +
value.length +
1 + // isDirty
8 + // timestamp
8 + // offset
4 + // presumably the record partition (int) — TODO confirm against the cache's sizing code
topic.length() +
// LRU Node entries
key.length +
8 + // entry
8 + // previous
8; // next
}
@Test
public void evict() {
// Cache capacity equals ONE entry, so each of the five puts after the first
// evicts the eldest entry; evicted dirty entries are forwarded to the listener.
final List<KeyValue<String, String>> received = new ArrayList<>();
// Only the first forwarded entry (K1, the eldest) is verified below.
final List<KeyValue<String, String>> expected = Collections.singletonList(
new KeyValue<>("K1", "V1"));
final List<KeyValue<String, String>> toInsert = Arrays.asList(
new KeyValue<>("K1", "V1"),
new KeyValue<>("K2", "V2"),
new KeyValue<>("K3", "V3"),
new KeyValue<>("K4", "V4"),
new KeyValue<>("K5", "V5"));
final KeyValue<String, String> kv = toInsert.get(0);
final ThreadCache cache = new ThreadCache(logContext,
memoryCacheEntrySize(kv.key.getBytes(), kv.value.getBytes(), ""),
new MockStreamsMetrics(new Metrics()));
cache.addDirtyEntryFlushListener(namespace, dirty -> {
for (final ThreadCache.DirtyEntry dirtyEntry : dirty) {
received.add(new KeyValue<>(dirtyEntry.key().toString(), new String(dirtyEntry.newValue())));
}
});
for (final KeyValue<String, String> kvToInsert : toInsert) {
final Bytes key = Bytes.wrap(kvToInsert.key.getBytes());
final byte[] value = kvToInsert.value.getBytes();
cache.put(namespace, key, new LRUCacheEntry(value, new RecordHeaders(), true, 1, 1, 1, "", rawKey, rawValue));
}
// Compares only expected.size() leading elements; later evictions (K2..K4)
// are covered by the evicts() counter instead.
for (int i = 0; i < expected.size(); i++) {
final KeyValue<String, String> expectedRecord = expected.get(i);
final KeyValue<String, String> actualRecord = received.get(i);
assertEquals(expectedRecord, actualRecord);
}
assertEquals(4, cache.evicts());
}
@Test
public void shouldDelete() {
// Deleting a cached key returns the stored entry and removes it from the cache.
final Bytes keyToRemove = Bytes.wrap(new byte[]{0});
final ThreadCache cache = new ThreadCache(logContext, 10000L, new MockStreamsMetrics(new Metrics()));
cache.put(namespace, keyToRemove, dirtyEntry(keyToRemove.get()));
final LRUCacheEntry removed = cache.delete(namespace, keyToRemove);
assertArrayEquals(keyToRemove.get(), removed.value());
assertNull(cache.get(namespace, keyToRemove));
}
@Test
public void shouldNotFlushAfterDelete() {
// A deleted entry must not be forwarded to the flush listener, although the
// flush call itself is still counted in the metrics.
final Bytes key = Bytes.wrap(new byte[]{0});
final ThreadCache cache = new ThreadCache(logContext, 10000L, new MockStreamsMetrics(new Metrics()));
final List<ThreadCache.DirtyEntry> forwarded = new ArrayList<>();
cache.addDirtyEntryFlushListener(namespace, forwarded::addAll);
cache.put(namespace, key, dirtyEntry(key.get()));
final LRUCacheEntry deleted = cache.delete(namespace, key);
assertArrayEquals(key.get(), deleted.value());
// flushing should have no further effect
cache.flush(namespace);
assertEquals(0, forwarded.size());
assertEquals(1, cache.flushes());
}
@Test
public void shouldNotBlowUpOnNonExistentKeyWhenDeleting() {
// Deleting a key that was never inserted yields null instead of throwing.
final ThreadCache cache = new ThreadCache(logContext, 10000L, new MockStreamsMetrics(new Metrics()));
cache.put(namespace, Bytes.wrap(new byte[]{0}), dirtyEntry(new byte[]{0}));
final Bytes missingKey = Bytes.wrap(new byte[]{1});
assertNull(cache.delete(namespace, missingKey));
}
@Test
public void shouldNotBlowUpOnNonExistentNamespaceWhenDeleting() {
// Deleting from a namespace that has no backing cache yields null.
final ThreadCache cache = new ThreadCache(logContext, 10000L, new MockStreamsMetrics(new Metrics()));
final Bytes missingKey = Bytes.wrap(new byte[]{1});
assertNull(cache.delete(namespace, missingKey));
}
@Test
public void shouldNotClashWithOverlappingNames() {
// Namespaces with a shared name prefix must stay isolated: the same key can
// hold different values in each.
final ThreadCache cache = new ThreadCache(logContext, 10000L, new MockStreamsMetrics(new Metrics()));
final Bytes sharedKey = Bytes.wrap(new byte[]{0});
final byte[] valueInNamespace1 = new byte[]{0};
final byte[] valueInNamespace2 = new byte[]{1};
cache.put(namespace1, sharedKey, dirtyEntry(valueInNamespace1));
cache.put(namespace2, sharedKey, dirtyEntry(valueInNamespace2));
assertArrayEquals(valueInNamespace1, cache.get(namespace1, sharedKey).value());
assertArrayEquals(valueInNamespace2, cache.get(namespace2, sharedKey).value());
}
/**
* Builds a cache pre-populated with single-byte keys taken from {@code bytes}:
* indices [first, last) ascending when {@code reverse} is false, or
* [first, last] descending when it is true.
*/
private ThreadCache setupThreadCache(final int first, final int last, final long entrySize, final boolean reverse) {
final ThreadCache cache = new ThreadCache(logContext, entrySize, new MockStreamsMetrics(new Metrics()));
cache.addDirtyEntryFlushListener(namespace, dirty -> { });
if (reverse) {
for (int i = first; i >= last; i--) {
cache.put(namespace, Bytes.wrap(bytes[i]), dirtyEntry(bytes[i]));
}
} else {
for (int i = first; i < last; i++) {
cache.put(namespace, Bytes.wrap(bytes[i]), dirtyEntry(bytes[i]));
}
}
return cache;
}
@Test
public void shouldPeekNextKey() {
// peekNextKey must not advance the iterator: two consecutive peeks return the same key.
final ThreadCache cache = setupThreadCache(0, 1, 10000L, false);
final Bytes from = Bytes.wrap(new byte[]{0});
final Bytes to = Bytes.wrap(new byte[]{1});
final ThreadCache.MemoryLRUCacheBytesIterator iterator = cache.range(namespace, from, to);
assertEquals(from, iterator.peekNextKey());
assertEquals(from, iterator.peekNextKey());
}
@Test
public void shouldPeekNextKeyReverseRange() {
// Reverse-range peek is likewise non-advancing and starts at the upper bound.
final ThreadCache cache = setupThreadCache(1, 1, 10000L, true);
final Bytes upperBound = Bytes.wrap(new byte[]{1});
final ThreadCache.MemoryLRUCacheBytesIterator iterator =
cache.reverseRange(namespace, Bytes.wrap(new byte[]{0}), upperBound);
assertThat(iterator.peekNextKey(), is(upperBound));
assertThat(iterator.peekNextKey(), is(upperBound));
}
@Test
public void shouldGetSameKeyAsPeekNext() {
// The key reported by peekNextKey is exactly the key of the following next().
final ThreadCache cache = setupThreadCache(0, 1, 10000L, false);
final Bytes lowerBound = Bytes.wrap(new byte[]{0});
final ThreadCache.MemoryLRUCacheBytesIterator iterator =
cache.range(namespace, lowerBound, Bytes.wrap(new byte[]{1}));
assertThat(iterator.peekNextKey(), is(iterator.next().key));
}
@Test
public void shouldGetSameKeyAsPeekNextReverseRange() {
// Same peek/next agreement, but for the reverse-range iterator.
final ThreadCache cache = setupThreadCache(1, 1, 10000L, true);
final Bytes upperBound = Bytes.wrap(new byte[]{1});
final ThreadCache.MemoryLRUCacheBytesIterator iterator =
cache.reverseRange(namespace, Bytes.wrap(new byte[]{0}), upperBound);
assertThat(iterator.peekNextKey(), is(iterator.next().key));
}
// Shared helper: peeking an exhausted iterator must fail fast with NoSuchElementException.
private void shouldThrowIfNoPeekNextKey(final Supplier<ThreadCache.MemoryLRUCacheBytesIterator> methodUnderTest) {
final ThreadCache.MemoryLRUCacheBytesIterator emptyIterator = methodUnderTest.get();
assertThrows(NoSuchElementException.class, emptyIterator::peekNextKey);
}
@Test
public void shouldThrowIfNoPeekNextKeyRange() {
// A cache with no entries yields an empty range iterator.
final ThreadCache cache = setupThreadCache(0, 0, 10000L, false);
final Bytes from = Bytes.wrap(new byte[]{0});
final Bytes to = Bytes.wrap(new byte[]{1});
shouldThrowIfNoPeekNextKey(() -> cache.range(namespace, from, to));
}
@Test
public void shouldThrowIfNoPeekNextKeyReverseRange() {
// Same contract for the reverse-range iterator over an empty cache.
final ThreadCache cache = setupThreadCache(-1, 0, 10000L, true);
final Bytes from = Bytes.wrap(new byte[]{0});
final Bytes to = Bytes.wrap(new byte[]{1});
shouldThrowIfNoPeekNextKey(() -> cache.reverseRange(namespace, from, to));
}
@Test
public void shouldReturnFalseIfNoNextKey() {
// hasNext() over an empty cache is false.
final ThreadCache cache = setupThreadCache(0, 0, 10000L, false);
final ThreadCache.MemoryLRUCacheBytesIterator emptyIterator =
cache.range(namespace, Bytes.wrap(new byte[]{0}), Bytes.wrap(new byte[]{1}));
assertFalse(emptyIterator.hasNext());
}
@Test
public void shouldReturnFalseIfNoNextKeyReverseRange() {
// hasNext() over an empty cache is false for the reverse iterator as well.
final ThreadCache cache = setupThreadCache(-1, 0, 10000L, true);
final ThreadCache.MemoryLRUCacheBytesIterator emptyIterator =
cache.reverseRange(namespace, Bytes.wrap(new byte[]{0}), Bytes.wrap(new byte[]{1}));
assertFalse(emptyIterator.hasNext());
}
@Test
public void shouldPeekAndIterateOverRange() {
// Range [1, 4] over keys {0}..{9} is inclusive on both ends: the iterator
// visits indices 1..4, and peek always agrees with the subsequent next().
final ThreadCache cache = setupThreadCache(0, 10, 10000L, false);
final ThreadCache.MemoryLRUCacheBytesIterator iterator = cache.range(namespace, Bytes.wrap(new byte[]{1}), Bytes.wrap(new byte[]{4}));
int bytesIndex = 1;
while (iterator.hasNext()) {
final Bytes peekedKey = iterator.peekNextKey();
final KeyValue<Bytes, LRUCacheEntry> next = iterator.next();
assertArrayEquals(bytes[bytesIndex], peekedKey.get());
assertArrayEquals(bytes[bytesIndex], next.key.get());
bytesIndex++;
}
assertEquals(5, bytesIndex); // visited 1,2,3,4
}
@Test
public void shouldSkipToEntryWhenToInclusiveIsFalseInRange() {
// With toInclusive == false the upper bound {4} is excluded: only 1..3 are visited.
final ThreadCache cache = setupThreadCache(0, 10, 10000L, false);
final ThreadCache.MemoryLRUCacheBytesIterator iterator = cache.range(namespace, Bytes.wrap(new byte[]{1}), Bytes.wrap(new byte[]{4}), false);
int bytesIndex = 1;
while (iterator.hasNext()) {
final Bytes peekedKey = iterator.peekNextKey();
final KeyValue<Bytes, LRUCacheEntry> next = iterator.next();
assertArrayEquals(bytes[bytesIndex], peekedKey.get());
assertArrayEquals(bytes[bytesIndex], next.key.get());
bytesIndex++;
}
assertEquals(4, bytesIndex); // visited 1,2,3 — bound {4} skipped
}
@Test
public void shouldPeekAndIterateOverReverseRange() {
// Reverse range [1, 4] yields keys in descending order: 4,3,2,1.
final ThreadCache cache = setupThreadCache(10, 0, 10000L, true);
final ThreadCache.MemoryLRUCacheBytesIterator iterator = cache.reverseRange(namespace, Bytes.wrap(new byte[]{1}), Bytes.wrap(new byte[]{4}));
int bytesIndex = 4;
while (iterator.hasNext()) {
final Bytes peekedKey = iterator.peekNextKey();
final KeyValue<Bytes, LRUCacheEntry> next = iterator.next();
assertArrayEquals(bytes[bytesIndex], peekedKey.get());
assertArrayEquals(bytes[bytesIndex], next.key.get());
bytesIndex--;
}
assertEquals(0, bytesIndex); // visited 4,3,2,1
}
@Test
public void shouldSkipEntriesWhereValueHasBeenEvictedFromCache() {
// Capacity is exactly 5 entries; inserting a 6th evicts the eldest ({0}),
// so a subsequent range iterator must start at {1}.
final long entrySize = memoryCacheEntrySize(new byte[1], new byte[1], "");
final ThreadCache cache = setupThreadCache(0, 5, entrySize * 5L, false);
assertEquals(5, cache.size());
// should evict byte[] {0}
cache.put(namespace, Bytes.wrap(new byte[]{6}), dirtyEntry(new byte[]{6}));
final ThreadCache.MemoryLRUCacheBytesIterator range = cache.range(namespace, Bytes.wrap(new byte[]{0}), Bytes.wrap(new byte[]{5}));
assertEquals(Bytes.wrap(new byte[]{1}), range.peekNextKey());
}
@Test
public void shouldSkipEntriesWhereValueHasBeenEvictedFromCacheReverseRange() {
// Keys inserted 4..0 descending, so {4} is the eldest and gets evicted by the
// 6th put; the reverse range [0,5] then starts at the highest survivor, {3}.
final long entrySize = memoryCacheEntrySize(new byte[1], new byte[1], "");
final ThreadCache cache = setupThreadCache(4, 0, entrySize * 5L, true);
assertEquals(5, cache.size());
// should evict byte[] {4}
cache.put(namespace, Bytes.wrap(new byte[]{6}), dirtyEntry(new byte[]{6}));
final ThreadCache.MemoryLRUCacheBytesIterator range = cache.reverseRange(namespace, Bytes.wrap(new byte[]{0}), Bytes.wrap(new byte[]{5}));
assertEquals(Bytes.wrap(new byte[]{3}), range.peekNextKey());
}
@Test
public void shouldFetchAllEntriesInCache() {
// all() iterates every key {0}..{10} in ascending order.
final ThreadCache cache = setupThreadCache(0, 11, 10000L, false);
final ThreadCache.MemoryLRUCacheBytesIterator iterator = cache.all(namespace);
int bytesIndex = 0;
while (iterator.hasNext()) {
final Bytes peekedKey = iterator.peekNextKey();
final KeyValue<Bytes, LRUCacheEntry> next = iterator.next();
assertArrayEquals(bytes[bytesIndex], peekedKey.get());
assertArrayEquals(bytes[bytesIndex], next.key.get());
bytesIndex++;
}
assertEquals(11, bytesIndex);
}
@Test
public void shouldFetchAllEntriesInCacheInReverseOrder() {
// reverseAll() iterates every key {10}..{0} in descending order.
final ThreadCache cache = setupThreadCache(10, 0, 10000L, true);
final ThreadCache.MemoryLRUCacheBytesIterator iterator = cache.reverseAll(namespace);
int bytesIndex = 10;
while (iterator.hasNext()) {
final Bytes peekedKey = iterator.peekNextKey();
final KeyValue<Bytes, LRUCacheEntry> next = iterator.next();
assertArrayEquals(bytes[bytesIndex], peekedKey.get());
assertArrayEquals(bytes[bytesIndex], next.key.get());
bytesIndex--;
}
assertEquals(-1, bytesIndex);
}
@Test
public void shouldReturnAllUnevictedValuesFromCache() {
// After the eldest entry {0} is evicted, all() starts at {1}.
final long entrySize = memoryCacheEntrySize(new byte[1], new byte[1], "");
final ThreadCache cache = setupThreadCache(0, 5, entrySize * 5L, false);
assertEquals(5, cache.size());
// should evict byte[] {0}
cache.put(namespace, Bytes.wrap(new byte[]{6}), dirtyEntry(new byte[]{6}));
final ThreadCache.MemoryLRUCacheBytesIterator range = cache.all(namespace);
assertEquals(Bytes.wrap(new byte[]{1}), range.peekNextKey());
}
@Test
public void shouldReturnAllUnevictedValuesFromCacheInReverseOrder() {
// Inserted 4..0 descending, so {4} is evicted by the 6th put; reverseAll()
// then starts at the highest remaining key, which is the newly added {6}.
final long entrySize = memoryCacheEntrySize(new byte[1], new byte[1], "");
final ThreadCache cache = setupThreadCache(4, 0, entrySize * 5L, true);
assertEquals(5, cache.size());
// should evict byte[] {4}
cache.put(namespace, Bytes.wrap(new byte[]{6}), dirtyEntry(new byte[]{6}));
final ThreadCache.MemoryLRUCacheBytesIterator range = cache.reverseAll(namespace);
assertEquals(Bytes.wrap(new byte[]{6}), range.peekNextKey());
}
@Test
public void shouldFlushDirtyEntriesForNamespace() {
// Flushing namespace1 forwards exactly its dirty keys, in insertion order,
// and leaves namespace2 untouched.
final ThreadCache cache = new ThreadCache(logContext, 100000, new MockStreamsMetrics(new Metrics()));
final List<byte[]> received = new ArrayList<>();
cache.addDirtyEntryFlushListener(namespace1, dirty ->
dirty.forEach(dirtyEntry -> received.add(dirtyEntry.key().get())));
final List<byte[]> expected = Arrays.asList(new byte[]{0}, new byte[]{1}, new byte[]{2});
for (final byte[] keyBytes : expected) {
cache.put(namespace1, Bytes.wrap(keyBytes), dirtyEntry(keyBytes));
}
cache.put(namespace2, Bytes.wrap(new byte[]{4}), dirtyEntry(new byte[]{4}));
cache.flush(namespace1);
assertEquals(expected, received);
}
@Test
public void shouldNotFlushCleanEntriesForNamespace() {
// Clean entries are never forwarded on flush; the listener sees nothing.
final ThreadCache cache = new ThreadCache(logContext, 100000, new MockStreamsMetrics(new Metrics()));
final List<byte[]> received = new ArrayList<>();
cache.addDirtyEntryFlushListener(namespace1, dirty ->
dirty.forEach(dirtyEntry -> received.add(dirtyEntry.key().get())));
final List<byte[]> toInsert = Arrays.asList(new byte[]{0}, new byte[]{1}, new byte[]{2});
for (final byte[] keyBytes : toInsert) {
cache.put(namespace1, Bytes.wrap(keyBytes), cleanEntry(keyBytes));
}
cache.put(namespace2, Bytes.wrap(new byte[]{4}), cleanEntry(new byte[]{4}));
cache.flush(namespace1);
assertEquals(Collections.emptyList(), received);
}
// Shared helper: with (near-)zero capacity a dirty put is evicted — and
// forwarded — immediately; a later flush must not forward it a second time.
private void shouldEvictImmediatelyIfCacheSizeIsZeroOrVerySmall(final ThreadCache cache) {
final List<ThreadCache.DirtyEntry> forwarded = new ArrayList<>();
cache.addDirtyEntryFlushListener(namespace, forwarded::addAll);
cache.put(namespace, Bytes.wrap(new byte[]{0}), dirtyEntry(new byte[]{0}));
assertEquals(1, forwarded.size());
// flushing should have no further effect
cache.flush(namespace);
assertEquals(1, forwarded.size());
}
@Test
public void shouldEvictImmediatelyIfCacheSizeIsVerySmall() {
// A 1-byte cache cannot hold any entry.
shouldEvictImmediatelyIfCacheSizeIsZeroOrVerySmall(
new ThreadCache(logContext, 1, new MockStreamsMetrics(new Metrics())));
}
@Test
public void shouldEvictImmediatelyIfCacheSizeIsZero() {
// A zero-capacity cache behaves the same as a near-zero one.
shouldEvictImmediatelyIfCacheSizeIsZeroOrVerySmall(
new ThreadCache(logContext, 0, new MockStreamsMetrics(new Metrics())));
}
@Test
public void shouldEvictAfterPutAll() {
// On a 1-byte cache every entry of the batch is evicted and forwarded.
final ThreadCache cache = new ThreadCache(logContext, 1, new MockStreamsMetrics(new Metrics()));
final List<ThreadCache.DirtyEntry> forwarded = new ArrayList<>();
cache.addDirtyEntryFlushListener(namespace, forwarded::addAll);
final List<KeyValue<Bytes, LRUCacheEntry>> batch = Arrays.asList(
KeyValue.pair(Bytes.wrap(new byte[]{0}), dirtyEntry(new byte[]{5})),
KeyValue.pair(Bytes.wrap(new byte[]{1}), dirtyEntry(new byte[]{6})));
cache.putAll(namespace, batch);
assertEquals(2, cache.evicts());
assertEquals(2, forwarded.size());
}
@Test
public void shouldPutAll() {
// putAll stores every pair; each key is retrievable with its own value.
final ThreadCache cache = new ThreadCache(logContext, 100000, new MockStreamsMetrics(new Metrics()));
final List<KeyValue<Bytes, LRUCacheEntry>> batch = Arrays.asList(
KeyValue.pair(Bytes.wrap(new byte[]{0}), dirtyEntry(new byte[]{5})),
KeyValue.pair(Bytes.wrap(new byte[]{1}), dirtyEntry(new byte[]{6})));
cache.putAll(namespace, batch);
assertArrayEquals(new byte[]{5}, cache.get(namespace, Bytes.wrap(new byte[]{0})).value());
assertArrayEquals(new byte[]{6}, cache.get(namespace, Bytes.wrap(new byte[]{1})).value());
}
@Test
public void shouldNotForwardCleanEntryOnEviction() {
// Clean entries are dropped silently on eviction — nothing reaches the listener.
final ThreadCache cache = new ThreadCache(logContext, 0, new MockStreamsMetrics(new Metrics()));
final List<ThreadCache.DirtyEntry> forwarded = new ArrayList<>();
cache.addDirtyEntryFlushListener(namespace, forwarded::addAll);
cache.put(namespace, Bytes.wrap(new byte[]{1}), cleanEntry(new byte[]{0}));
assertEquals(0, forwarded.size());
}
@Test
public void shouldPutIfAbsent() {
// The first putIfAbsent stores the value and returns null; the second returns
// the existing entry and leaves the stored value unchanged.
final ThreadCache cache = new ThreadCache(logContext, 100000, new MockStreamsMetrics(new Metrics()));
final Bytes key = Bytes.wrap(new byte[]{10});
final byte[] originalValue = {30};
assertNull(cache.putIfAbsent(namespace, key, dirtyEntry(originalValue)));
final LRUCacheEntry existing = cache.putIfAbsent(namespace, key, dirtyEntry(new byte[]{8}));
assertArrayEquals(originalValue, existing.value());
assertArrayEquals(originalValue, cache.get(namespace, key).value());
}
@Test
public void shouldEvictAfterPutIfAbsent() {
// On a 1-byte cache every putIfAbsent triggers an eviction — including the
// call that finds the key already present.
final ThreadCache cache = new ThreadCache(logContext, 1, new MockStreamsMetrics(new Metrics()));
final List<ThreadCache.DirtyEntry> forwarded = new ArrayList<>();
cache.addDirtyEntryFlushListener(namespace, forwarded::addAll);
cache.putIfAbsent(namespace, Bytes.wrap(new byte[]{0}), dirtyEntry(new byte[]{5}));
cache.putIfAbsent(namespace, Bytes.wrap(new byte[]{1}), dirtyEntry(new byte[]{6}));
cache.putIfAbsent(namespace, Bytes.wrap(new byte[]{1}), dirtyEntry(new byte[]{6}));
assertEquals(3, cache.evicts());
assertEquals(3, forwarded.size());
}
@Test
public void shouldNotLoopForEverWhenEvictingAndCurrentCacheIsEmpty() {
final int maxCacheSizeInBytes = 100;
final ThreadCache threadCache = new ThreadCache(logContext, maxCacheSizeInBytes, new MockStreamsMetrics(new Metrics()));
// trigger a put into another cache on eviction from "name"
threadCache.addDirtyEntryFlushListener(namespace, dirty -> {
// put an item into an empty cache when the total cache size
// is already > than maxCacheSizeBytes
threadCache.put(namespace1, Bytes.wrap(new byte[]{0}), dirtyEntry(new byte[2]));
});
threadCache.addDirtyEntryFlushListener(namespace1, dirty -> { });
threadCache.addDirtyEntryFlushListener(namespace2, dirty -> { });
threadCache.put(namespace2, Bytes.wrap(new byte[]{1}), dirtyEntry(new byte[1]));
threadCache.put(namespace, Bytes.wrap(new byte[]{1}), dirtyEntry(new byte[1]));
// Put a large item such that when the eldest item is removed
// cache sizeInBytes() > maxCacheSizeBytes
final int remaining = (int) (maxCacheSizeInBytes - threadCache.sizeBytes());
threadCache.put(namespace, Bytes.wrap(new byte[]{2}), dirtyEntry(new byte[remaining + 100]));
}
@Test
public void shouldCleanupNamedCacheOnClose() {
final ThreadCache cache = new ThreadCache(logContext, 100000, new MockStreamsMetrics(new Metrics()));
cache.put(namespace1, Bytes.wrap(new byte[]{1}), cleanEntry(new byte[] {1}));
cache.put(namespace2, Bytes.wrap(new byte[]{1}), cleanEntry(new byte[] {1}));
assertEquals(2, cache.size());
cache.close(namespace2);
assertEquals(1, cache.size());
assertNull(cache.get(namespace2, Bytes.wrap(new byte[]{1})));
}
@Test
public void shouldReturnNullIfKeyIsNull() {
final ThreadCache threadCache = new ThreadCache(logContext, 10, new MockStreamsMetrics(new Metrics()));
threadCache.put(namespace, Bytes.wrap(new byte[]{1}), cleanEntry(new byte[] {1}));
assertNull(threadCache.get(namespace, null));
}
@Test
public void shouldCalculateSizeInBytes() {
final ThreadCache cache = new ThreadCache(logContext, 100000, new MockStreamsMetrics(new Metrics()));
final NamedCache.LRUNode node = new NamedCache.LRUNode(Bytes.wrap(new byte[]{1}), dirtyEntry(new byte[]{0}));
cache.put(namespace1, Bytes.wrap(new byte[]{1}), cleanEntry(new byte[]{0}));
assertEquals(cache.sizeBytes(), node.size());
}
@Test
public void shouldResizeAndShrink() {
final ThreadCache cache = new ThreadCache(logContext, 10000, new MockStreamsMetrics(new Metrics()));
cache.put(namespace, Bytes.wrap(new byte[]{1}), cleanEntry(new byte[]{0}));
cache.put(namespace, Bytes.wrap(new byte[]{2}), cleanEntry(new byte[]{0}));
cache.put(namespace, Bytes.wrap(new byte[]{3}), cleanEntry(new byte[]{0}));
assertEquals(141, cache.sizeBytes());
cache.resize(100);
assertEquals(94, cache.sizeBytes());
cache.put(namespace1, Bytes.wrap(new byte[]{4}), cleanEntry(new byte[]{0}));
assertEquals(94, cache.sizeBytes());
}
private LRUCacheEntry dirtyEntry(final byte[] key) {
return new LRUCacheEntry(key, new RecordHeaders(), true, -1, -1, -1, "", rawKey, rawValue);
}
private LRUCacheEntry cleanEntry(final byte[] key) {
return new LRUCacheEntry(key);
}
}
|
ThreadCacheTest
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/output/KeyValueScanStreamingOutput.java
|
{
"start": 338,
"end": 1114
}
|
class ____<K, V> extends ScanOutput<K, V, StreamScanCursor> {
private K key;
private boolean hasKey;
private KeyValueStreamingChannel<K, V> channel;
public KeyValueScanStreamingOutput(RedisCodec<K, V> codec, KeyValueStreamingChannel<K, V> channel) {
super(codec, new StreamScanCursor());
this.channel = channel;
}
@Override
protected void setOutput(ByteBuffer bytes) {
if (!hasKey) {
key = codec.decodeKey(bytes);
hasKey = true;
return;
}
V value = (bytes == null) ? null : codec.decodeValue(bytes);
channel.onKeyValue(key, value);
output.setCount(output.getCount() + 1);
key = null;
hasKey = false;
}
}
|
KeyValueScanStreamingOutput
|
java
|
elastic__elasticsearch
|
modules/repository-gcs/src/internalClusterTest/java/org/elasticsearch/repositories/gcs/GcsProxyIntegrationTests.java
|
{
"start": 2162,
"end": 4880
}
|
class ____ extends ESBlobStoreRepositoryIntegTestCase {
private static HttpServer upstreamServer;
private static WebProxyServer proxyServer;
@BeforeClass
public static void startServers() throws Exception {
upstreamServer = MockHttpServer.createHttp(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0);
upstreamServer.start();
proxyServer = new WebProxyServer();
}
@AfterClass
public static void stopServers() throws IOException {
upstreamServer.stop(0);
proxyServer.close();
}
@Before
public void setUpHttpServer() {
upstreamServer.createContext("/", new ForwardedViaProxyHandler(new GoogleCloudStorageHttpHandler("bucket")));
upstreamServer.createContext("/token", new ForwardedViaProxyHandler(new FakeOAuth2HttpHandler()));
}
@After
public void tearDownHttpServer() {
upstreamServer.removeContext("/");
upstreamServer.removeContext("/token");
}
@Override
protected String repositoryType() {
return GoogleCloudStorageRepository.TYPE;
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return List.of(GoogleCloudStoragePlugin.class);
}
@Override
protected Settings repositorySettings(String repoName) {
return Settings.builder()
.put(super.repositorySettings(repoName))
.put(BUCKET.getKey(), "bucket")
.put(CLIENT_NAME.getKey(), "test")
.build();
}
@Override
protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
var secureSettings = new MockSecureSettings();
secureSettings.setFile(
CREDENTIALS_FILE_SETTING.getConcreteSettingForNamespace("test").getKey(),
TestUtils.createServiceAccount(random())
);
String upstreamServerUrl = "http://" + upstreamServer.getAddress().getHostString() + ":" + upstreamServer.getAddress().getPort();
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal, otherSettings))
.put(ENDPOINT_SETTING.getConcreteSettingForNamespace("test").getKey(), upstreamServerUrl)
.put(TOKEN_URI_SETTING.getConcreteSettingForNamespace("test").getKey(), upstreamServerUrl + "/token")
.put(PROXY_HOST_SETTING.getConcreteSettingForNamespace("test").getKey(), proxyServer.getHost())
.put(PROXY_PORT_SETTING.getConcreteSettingForNamespace("test").getKey(), proxyServer.getPort())
.put(PROXY_TYPE_SETTING.getConcreteSettingForNamespace("test").getKey(), "http")
.setSecureSettings(secureSettings)
.build();
}
}
|
GcsProxyIntegrationTests
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskImpl.java
|
{
"start": 35583,
"end": 36853
}
|
class ____
implements SingleArcTransition<TaskImpl, TaskEvent> {
@Override
public void transition(TaskImpl task, TaskEvent event) {
TaskTAttemptEvent taskTAttemptEvent = (TaskTAttemptEvent) event;
TaskAttemptId taskAttemptId = taskTAttemptEvent.getTaskAttemptID();
task.handleTaskAttemptCompletion(
taskAttemptId,
TaskAttemptCompletionEventStatus.SUCCEEDED);
task.finishedAttempts.add(taskAttemptId);
task.inProgressAttempts.remove(taskAttemptId);
task.successfulAttempt = taskAttemptId;
task.sendTaskSucceededEvents();
for (TaskAttempt attempt : task.attempts.values()) {
if (attempt.getID() != task.successfulAttempt &&
// This is okay because it can only talk us out of sending a
// TA_KILL message to an attempt that doesn't need one for
// other reasons.
!attempt.isFinished()) {
LOG.info("Issuing kill to other attempt " + attempt.getID());
task.eventHandler.handle(new TaskAttemptKillEvent(attempt.getID(),
SPECULATION + task.successfulAttempt + " succeeded first!"));
}
}
task.finished(TaskStateInternal.SUCCEEDED);
}
}
private static
|
AttemptSucceededTransition
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/clients/admin/ListTransactionsOptions.java
|
{
"start": 1039,
"end": 5788
}
|
class ____ extends AbstractOptions<ListTransactionsOptions> {
private Set<TransactionState> filteredStates = Collections.emptySet();
private Set<Long> filteredProducerIds = Collections.emptySet();
private long filteredDuration = -1L;
private String filteredTransactionalIdPattern;
/**
* Filter only the transactions that are in a specific set of states. If no filter
* is specified or if the passed set of states is empty, then transactions in all
* states will be returned.
*
* @param states the set of states to filter by
* @return this object
*/
public ListTransactionsOptions filterStates(Collection<TransactionState> states) {
this.filteredStates = new HashSet<>(states);
return this;
}
/**
* Filter only the transactions from producers in a specific set of producerIds.
* If no filter is specified or if the passed collection of producerIds is empty,
* then the transactions of all producerIds will be returned.
*
* @param producerIdFilters the set of producerIds to filter by
* @return this object
*/
public ListTransactionsOptions filterProducerIds(Collection<Long> producerIdFilters) {
this.filteredProducerIds = new HashSet<>(producerIdFilters);
return this;
}
/**
* Filter only the transactions that are running longer than the specified duration.
* If no filter is specified or if the passed duration ms is less than 0,
* then the all transactions will be returned.
*
* @param durationMs the duration in milliseconds to filter by
* @return this object
*/
public ListTransactionsOptions filterOnDuration(long durationMs) {
this.filteredDuration = durationMs;
return this;
}
/**
* Filter only the transactions that match with the given transactional ID pattern.
* If the filter is null or if the passed string is empty,
* then all the transactions will be returned.
*
* @param pattern the transactional ID regular expression pattern to filter by
* @return this object
*/
public ListTransactionsOptions filterOnTransactionalIdPattern(String pattern) {
this.filteredTransactionalIdPattern = pattern;
return this;
}
/**
* Returns the set of states to be filtered or empty if no states have been specified.
*
* @return the current set of filtered states (empty means that no states are filtered and
* all transactions will be returned)
*/
public Set<TransactionState> filteredStates() {
return filteredStates;
}
/**
* Returns the set of producerIds that are being filtered or empty if none have been specified.
*
* @return the current set of filtered states (empty means that no producerIds are filtered and
* all transactions will be returned)
*/
public Set<Long> filteredProducerIds() {
return filteredProducerIds;
}
/**
* Returns the duration ms value being filtered.
*
* @return the current duration filter value in ms (negative value means transactions are not filtered by duration)
*/
public long filteredDuration() {
return filteredDuration;
}
/**
* Returns transactional ID being filtered.
*
* @return the current transactional ID pattern filter (empty means no transactional IDs are filtered and all
* transactions will be returned)
*/
public String filteredTransactionalIdPattern() {
return filteredTransactionalIdPattern;
}
@Override
public String toString() {
return "ListTransactionsOptions(" +
"filteredStates=" + filteredStates +
", filteredProducerIds=" + filteredProducerIds +
", filteredDuration=" + filteredDuration +
", filteredTransactionalIdPattern=" + filteredTransactionalIdPattern +
", timeoutMs=" + timeoutMs +
')';
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ListTransactionsOptions that = (ListTransactionsOptions) o;
return Objects.equals(filteredStates, that.filteredStates) &&
Objects.equals(filteredProducerIds, that.filteredProducerIds) &&
Objects.equals(filteredDuration, that.filteredDuration) &&
Objects.equals(filteredTransactionalIdPattern, that.filteredTransactionalIdPattern);
}
@Override
public int hashCode() {
return Objects.hash(filteredStates, filteredProducerIds, filteredDuration, filteredTransactionalIdPattern);
}
}
|
ListTransactionsOptions
|
java
|
apache__camel
|
components/camel-infinispan/camel-infinispan-embedded/src/test/java/org/apache/camel/component/infinispan/embedded/spring/SpringInfinispanEmbeddedIdempotentRepositoryCamelTest.java
|
{
"start": 1096,
"end": 1496
}
|
class ____
extends SpringInfinispanEmbeddedIdempotentRepositoryTestSupport {
@Override
protected AbstractApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext(
"org/apache/camel/component/infinispan/spring/SpringInfinispanEmbeddedIdempotentRepositoryCamelTest.xml");
}
}
|
SpringInfinispanEmbeddedIdempotentRepositoryCamelTest
|
java
|
ReactiveX__RxJava
|
src/test/java/io/reactivex/rxjava3/observable/ObservableCombineLatestTests.java
|
{
"start": 823,
"end": 2490
}
|
class ____ extends RxJavaTest {
/**
* This won't compile if super/extends isn't done correctly on generics.
*/
@Test
public void covarianceOfCombineLatest() {
Observable<HorrorMovie> horrors = Observable.just(new HorrorMovie());
Observable<CoolRating> ratings = Observable.just(new CoolRating());
Observable.<Movie, CoolRating, Result> combineLatest(horrors, ratings, combine).blockingForEach(action);
Observable.<Movie, CoolRating, Result> combineLatest(horrors, ratings, combine).blockingForEach(action);
Observable.<Media, Rating, ExtendedResult> combineLatest(horrors, ratings, combine).blockingForEach(extendedAction);
Observable.<Media, Rating, Result> combineLatest(horrors, ratings, combine).blockingForEach(action);
Observable.<Media, Rating, ExtendedResult> combineLatest(horrors, ratings, combine).blockingForEach(action);
Observable.<Movie, CoolRating, Result> combineLatest(horrors, ratings, combine);
}
BiFunction<Media, Rating, ExtendedResult> combine = new BiFunction<Media, Rating, ExtendedResult>() {
@Override
public ExtendedResult apply(Media m, Rating r) {
return new ExtendedResult();
}
};
Consumer<Result> action = new Consumer<Result>() {
@Override
public void accept(Result t1) {
System.out.println("Result: " + t1);
}
};
Consumer<ExtendedResult> extendedAction = new Consumer<ExtendedResult>() {
@Override
public void accept(ExtendedResult t1) {
System.out.println("Result: " + t1);
}
};
}
|
ObservableCombineLatestTests
|
java
|
quarkusio__quarkus
|
extensions/amazon-lambda-http/runtime/src/main/java/io/quarkus/amazon/lambda/http/DefaultLambdaIdentityProvider.java
|
{
"start": 603,
"end": 4420
}
|
class ____ implements IdentityProvider<DefaultLambdaAuthenticationRequest> {
@Override
public Class<DefaultLambdaAuthenticationRequest> getRequestType() {
return DefaultLambdaAuthenticationRequest.class;
}
@Override
public Uni<SecurityIdentity> authenticate(DefaultLambdaAuthenticationRequest request,
AuthenticationRequestContext context) {
APIGatewayV2HTTPEvent event = request.getEvent();
SecurityIdentity identity = authenticate(event, LambdaHttpRecorder.config.mapCognitoToRoles());
if (identity == null) {
return Uni.createFrom().optional(Optional.empty());
}
return Uni.createFrom().item(identity);
}
/**
* Create a SecurityIdentity with a principal derived from APIGatewayV2HTTPEvent.
* Looks for Cognito JWT, IAM, or Custom Lambda metadata for principal name
*
* Cognito JWTs will automatically add Cognito groups as Quarkus roles
*
* @param event
* @param groups add "cognito:groups" to SecurityIdentity roles
* @return
*/
public static SecurityIdentity authenticate(APIGatewayV2HTTPEvent event, boolean groups) {
Principal principal = getPrincipal(event);
if (principal == null) {
return null;
}
QuarkusSecurityIdentity.Builder builder = QuarkusSecurityIdentity.builder();
builder.setPrincipal(principal);
if (groups) {
if (principal instanceof CognitoPrincipal) {
CognitoPrincipal cognito = (CognitoPrincipal) principal;
for (String group : cognito.getGroups()) {
builder.addRole(group);
}
}
}
return builder.build();
}
protected static Principal getPrincipal(APIGatewayV2HTTPEvent request) {
final Map<String, String> systemEnvironment = System.getenv();
final boolean isSamLocal = Boolean.parseBoolean(systemEnvironment.get("AWS_SAM_LOCAL"));
final APIGatewayV2HTTPEvent.RequestContext requestContext = request.getRequestContext();
if (isSamLocal && (requestContext == null || requestContext.getAuthorizer() == null)) {
final String forcedUserName = systemEnvironment.get("QUARKUS_AWS_LAMBDA_FORCE_USER_NAME");
if (forcedUserName != null && !forcedUserName.isEmpty()) {
return new QuarkusPrincipal(forcedUserName);
}
} else {
if (requestContext != null) {
final APIGatewayV2HTTPEvent.RequestContext.Authorizer authorizer = requestContext.getAuthorizer();
if (authorizer != null) {
if (authorizer.getJwt() != null) {
final APIGatewayV2HTTPEvent.RequestContext.Authorizer.JWT jwt = authorizer.getJwt();
final Map<String, String> claims = jwt.getClaims();
if (claims != null && claims.containsKey("cognito:username")) {
return new CognitoPrincipal(jwt);
}
} else if (authorizer.getIam() != null) {
if (authorizer.getIam().getUserId() != null) {
return new IAMPrincipal(authorizer.getIam());
}
} else if (authorizer.getLambda() != null) {
Object tmp = authorizer.getLambda().get("principalId");
if (tmp != null && tmp instanceof String) {
String username = (String) tmp;
return new CustomPrincipal(username, authorizer.getLambda());
}
}
}
}
}
return null;
}
}
|
DefaultLambdaIdentityProvider
|
java
|
apache__camel
|
dsl/camel-jbang/camel-jbang-core/src/main/java/org/apache/camel/dsl/jbang/core/commands/config/ConfigGet.java
|
{
"start": 1269,
"end": 2065
}
|
class ____ extends CamelCommand {
@CommandLine.Parameters(description = "Configuration key", arity = "1")
String key;
@CommandLine.Option(names = { "--global" }, description = "Use global or local configuration")
boolean global = true;
public ConfigGet(CamelJBangMain main) {
super(main);
}
@Override
public Integer doCall() throws Exception {
CommandLineHelper.loadProperties(properties -> {
Optional<Object> maybeProperty = Optional.ofNullable(properties.get(key));
if (maybeProperty.isPresent()) {
printer().println(String.valueOf(maybeProperty.get()));
} else {
printer().println(key + " key not found");
}
}, !global);
return 0;
}
}
|
ConfigGet
|
java
|
spring-projects__spring-security
|
config/src/main/java/org/springframework/security/config/http/SessionCreationPolicy.java
|
{
"start": 913,
"end": 1396
}
|
enum ____ {
/**
* Always create an {@link HttpSession}
*/
ALWAYS,
/**
* Spring Security will never create an {@link HttpSession}, but will use the
* {@link HttpSession} if it already exists
*/
NEVER,
/**
* Spring Security will only create an {@link HttpSession} if required
*/
IF_REQUIRED,
/**
* Spring Security will never create an {@link HttpSession} and it will never use it
* to obtain the {@link SecurityContext}
*/
STATELESS
}
|
SessionCreationPolicy
|
java
|
apache__spark
|
common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RemoteBlockPushResolver.java
|
{
"start": 77695,
"end": 78538
}
|
class ____ {
public final String appId;
public final int attemptId;
@JsonCreator
public AppAttemptId(
@JsonProperty("appId") String appId,
@JsonProperty("attemptId") int attemptId) {
this.appId = appId;
this.attemptId = attemptId;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
AppAttemptId appAttemptId = (AppAttemptId) o;
return attemptId == appAttemptId.attemptId &&
Objects.equals(appId, appAttemptId.appId);
}
@Override
public int hashCode() {
return Objects.hash(appId, attemptId);
}
@Override
public String toString() {
return String.format("Application %s_%s", appId, attemptId);
}
}
/**
* Wrapper
|
AppAttemptId
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/internal/bytes/Bytes_assertIsStrictlyBetween_Test.java
|
{
"start": 1554,
"end": 3865
}
|
class ____ extends BytesBaseTest {
private static final Byte ZERO = (byte) 0;
private static final Byte ONE = (byte) 1;
private static final Byte TWO = (byte) 2;
private static final Byte TEN = (byte) 10;
@Test
void should_fail_if_actual_is_null() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> bytes.assertIsStrictlyBetween(someInfo(), null, ZERO, ONE))
.withMessage(actualIsNull());
}
@Test
void should_fail_if_start_is_null() {
assertThatNullPointerException().isThrownBy(() -> bytes.assertIsStrictlyBetween(someInfo(), ONE, null, ONE));
}
@Test
void should_fail_if_end_is_null() {
assertThatNullPointerException().isThrownBy(() -> bytes.assertIsStrictlyBetween(someInfo(), ONE, ZERO, null));
}
@Test
void should_pass_if_actual_is_in_range() {
bytes.assertIsStrictlyBetween(someInfo(), ONE, ZERO, TEN);
}
@Test
void should_fail_if_actual_is_equal_to_range_start() {
AssertionInfo info = someInfo();
Throwable error = catchThrowable(() -> bytes.assertIsStrictlyBetween(info, ONE, ONE, TEN));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldBeBetween(ONE, ONE, TEN, false, false));
}
@Test
void should_fail_if_actual_is_equal_to_range_end() {
AssertionInfo info = someInfo();
Throwable error = catchThrowable(() -> bytes.assertIsStrictlyBetween(info, ONE, ZERO, ONE));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldBeBetween(ONE, ZERO, ONE, false, false));
}
@Test
void should_fail_if_actual_is_not_in_range_start() {
AssertionInfo info = someInfo();
Throwable error = catchThrowable(() -> bytes.assertIsStrictlyBetween(info, ONE, TWO, TEN));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldBeBetween(ONE, TWO, TEN, false, false));
}
@Test
void should_fail_if_actual_is_not_in_range_end() {
assertThatIllegalArgumentException().isThrownBy(() -> bytes.assertIsStrictlyBetween(someInfo(), ONE, ZERO, ZERO))
.withMessage("The end value <0> must not be less than or equal to the start value <0>!");
}
}
|
Bytes_assertIsStrictlyBetween_Test
|
java
|
quarkusio__quarkus
|
integration-tests/test-extension/extension/deployment/src/test/java/io/quarkus/config/SecretKeysConfigTest.java
|
{
"start": 1390,
"end": 1449
}
|
interface ____ {
String secret();
}
}
|
MappingSecret
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java
|
{
"start": 1491,
"end": 1779
}
|
class ____ extends ActionType<NodeAcknowledgedResponse> {
public static final OpenJobAction INSTANCE = new OpenJobAction();
public static final String NAME = "cluster:admin/xpack/ml/job/open";
private OpenJobAction() {
super(NAME);
}
public static
|
OpenJobAction
|
java
|
apache__flink
|
flink-core/src/test/java/org/apache/flink/api/common/eventtime/WatermarksWithIdlenessTest.java
|
{
"start": 1340,
"end": 4358
}
|
class ____ {
@Test
void testZeroTimeout() {
assertThatThrownBy(
() ->
new WatermarksWithIdleness<>(
new AscendingTimestampsWatermarks<>(),
Duration.ZERO,
SystemClock.getInstance()))
.isInstanceOf(IllegalArgumentException.class);
}
@Test
void testNegativeTimeout() {
assertThatThrownBy(
() ->
new WatermarksWithIdleness<>(
new AscendingTimestampsWatermarks<>(),
Duration.ofMillis(-1L),
SystemClock.getInstance()))
.isInstanceOf(IllegalArgumentException.class);
}
@Test
void testInitiallyActive() {
final ManualClock clock = new ManualClock(System.nanoTime());
final IdlenessTimer timer = new IdlenessTimer(clock, Duration.ofMillis(10));
assertThat(timer.checkIfIdle()).isFalse();
}
@Test
void testIdleWithoutEvents() {
final ManualClock clock = new ManualClock(System.nanoTime());
final IdlenessTimer timer = new IdlenessTimer(clock, Duration.ofMillis(10));
timer.checkIfIdle(); // start timer
clock.advanceTime(11, MILLISECONDS);
assertThat(timer.checkIfIdle()).isTrue();
}
@Test
void testRepeatedIdleChecks() {
final ManualClock clock = new ManualClock(System.nanoTime());
final IdlenessTimer timer = createTimerAndMakeIdle(clock, Duration.ofMillis(122));
assertThat(timer.checkIfIdle()).isTrue();
clock.advanceTime(100, MILLISECONDS);
assertThat(timer.checkIfIdle()).isTrue();
}
@Test
void testActiveAfterIdleness() {
final ManualClock clock = new ManualClock(System.nanoTime());
final IdlenessTimer timer = createTimerAndMakeIdle(clock, Duration.ofMillis(10));
timer.activity();
assertThat(timer.checkIfIdle()).isFalse();
}
@Test
void testIdleActiveIdle() {
final ManualClock clock = new ManualClock(System.nanoTime());
final IdlenessTimer timer = createTimerAndMakeIdle(clock, Duration.ofMillis(122));
// active again
timer.activity();
assertThat(timer.checkIfIdle()).isFalse();
// idle again
timer.checkIfIdle(); // start timer
clock.advanceTime(Duration.ofMillis(123));
assertThat(timer.checkIfIdle()).isTrue();
}
private static IdlenessTimer createTimerAndMakeIdle(ManualClock clock, Duration idleTimeout) {
final IdlenessTimer timer = new IdlenessTimer(clock, idleTimeout);
timer.checkIfIdle(); // start timer
clock.advanceTime(idleTimeout.plusMillis(1));
assertThat(timer.checkIfIdle()).isTrue(); // rigger timer
return timer;
}
}
|
WatermarksWithIdlenessTest
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/ScpEndpointBuilderFactory.java
|
{
"start": 1578,
"end": 16156
}
|
interface ____
extends
EndpointProducerBuilder {
default AdvancedScpEndpointBuilder advanced() {
return (AdvancedScpEndpointBuilder) this;
}
/**
* If provided, then Camel will write a checksum file when the original
* file has been written. The checksum file will contain the checksum
* created with the provided algorithm for the original file. The
* checksum file will always be written in the same folder as the
* original file.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param checksumFileAlgorithm the value to set
* @return the dsl builder
*/
default ScpEndpointBuilder checksumFileAlgorithm(String checksumFileAlgorithm) {
doSetProperty("checksumFileAlgorithm", checksumFileAlgorithm);
return this;
}
/**
* Allows you to set chmod on the stored file. For example chmod=664.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: 664
* Group: producer
*
* @param chmod the value to set
* @return the dsl builder
*/
default ScpEndpointBuilder chmod(String chmod) {
doSetProperty("chmod", chmod);
return this;
}
/**
* Whether or not to disconnect from remote FTP server right after use.
* Disconnect will only disconnect the current connection to the FTP
* server. If you have a consumer which you want to stop, then you need
* to stop the consumer/route instead.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param disconnect the value to set
* @return the dsl builder
*/
default ScpEndpointBuilder disconnect(boolean disconnect) {
doSetProperty("disconnect", disconnect);
return this;
}
/**
* Whether or not to disconnect from remote FTP server right after use.
* Disconnect will only disconnect the current connection to the FTP
* server. If you have a consumer which you want to stop, then you need
* to stop the consumer/route instead.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param disconnect the value to set
* @return the dsl builder
*/
default ScpEndpointBuilder disconnect(String disconnect) {
doSetProperty("disconnect", disconnect);
return this;
}
/**
* Use Expression such as File Language to dynamically set the filename.
* For consumers, it's used as a filename filter. For producers, it's
* used to evaluate the filename to write. If an expression is set, it
* take precedence over the CamelFileName header. (Note: The header
* itself can also be an Expression). The expression options support
* both String and Expression types. If the expression is a String type,
* it is always evaluated using the File Language. If the expression is
* an Expression type, the specified Expression type is used - this
* allows you, for instance, to use OGNL expressions. For the consumer,
* you can use it to filter filenames, so you can for instance consume
* today's file using the File Language syntax:
* mydata-${date:now:yyyyMMdd}.txt. The producers support the
* CamelOverruleFileName header which takes precedence over any existing
* CamelFileName header; the CamelOverruleFileName is a header that is
* used only once, and makes it easier as this avoids to temporary store
* CamelFileName and have to restore it afterwards.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param fileName the value to set
* @return the dsl builder
*/
default ScpEndpointBuilder fileName(String fileName) {
doSetProperty("fileName", fileName);
return this;
}
/**
* Flatten is used to flatten the file name path to strip any leading
* paths, so it's just the file name. This allows you to consume
* recursively into sub-directories, but when you eg write the files to
* another directory they will be written in a single directory. Setting
* this to true on the producer enforces that any file name in
* CamelFileName header will be stripped for any leading paths.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param flatten the value to set
* @return the dsl builder
*/
default ScpEndpointBuilder flatten(boolean flatten) {
doSetProperty("flatten", flatten);
return this;
}
/**
* Flatten is used to flatten the file name path to strip any leading
* paths, so it's just the file name. This allows you to consume
* recursively into sub-directories, but when you eg write the files to
* another directory they will be written in a single directory. Setting
* this to true on the producer enforces that any file name in
* CamelFileName header will be stripped for any leading paths.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param flatten the value to set
* @return the dsl builder
*/
default ScpEndpointBuilder flatten(String flatten) {
doSetProperty("flatten", flatten);
return this;
}
/**
* Used for jailing (restricting) writing files to the starting
* directory (and sub) only. This is enabled by default to not allow
* Camel to write files to outside directories (to be more secured out
* of the box). You can turn this off to allow writing files to
* directories outside the starting directory, such as parent or root
* folders.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: producer
*
* @param jailStartingDirectory the value to set
* @return the dsl builder
*/
default ScpEndpointBuilder jailStartingDirectory(boolean jailStartingDirectory) {
doSetProperty("jailStartingDirectory", jailStartingDirectory);
return this;
}
/**
* Used for jailing (restricting) writing files to the starting
* directory (and sub) only. This is enabled by default to not allow
* Camel to write files to outside directories (to be more secured out
* of the box). You can turn this off to allow writing files to
* directories outside the starting directory, such as parent or root
* folders.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: producer
*
* @param jailStartingDirectory the value to set
* @return the dsl builder
*/
default ScpEndpointBuilder jailStartingDirectory(String jailStartingDirectory) {
doSetProperty("jailStartingDirectory", jailStartingDirectory);
return this;
}
/**
* Sets whether to use strict host key checking. Possible values are:
* no, yes.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: no
* Group: producer
*
* @param strictHostKeyChecking the value to set
* @return the dsl builder
*/
default ScpEndpointBuilder strictHostKeyChecking(String strictHostKeyChecking) {
doSetProperty("strictHostKeyChecking", strictHostKeyChecking);
return this;
}
        /**
         * Sets the known_hosts file, so that the jsch endpoint can do host key
         * verification. You can prefix with classpath: to load the file from
         * classpath instead of file system.
         *
         * This option can also be loaded from an existing file, by prefixing
         * with file: or classpath: followed by the location of the file.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: security
         *
         * @param knownHostsFile the value to set
         * @return the dsl builder
         */
        default ScpEndpointBuilder knownHostsFile(String knownHostsFile) {
            // The value may be prefixed with "classpath:" or "file:" (see Javadoc above).
            doSetProperty("knownHostsFile", knownHostsFile);
            return this;
        }
        /**
         * Password to use for login.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: security
         *
         * @param password the value to set
         * @return the dsl builder
         */
        default ScpEndpointBuilder password(String password) {
            // Credential for password-based login; stored like any other endpoint property.
            doSetProperty("password", password);
            return this;
        }
        /**
         * Set a comma separated list of authentications that will be used in
         * order of preference. Possible authentication methods are defined by
         * JCraft JSCH. Some examples include:
         * gssapi-with-mic,publickey,keyboard-interactive,password If not
         * specified the JSCH and/or system defaults will be used.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: security
         *
         * @param preferredAuthentications the value to set
         * @return the dsl builder
         */
        default ScpEndpointBuilder preferredAuthentications(String preferredAuthentications) {
            // Comma separated, in order of preference; JSCH/system defaults apply when unset.
            doSetProperty("preferredAuthentications", preferredAuthentications);
            return this;
        }
        /**
         * Set the private key bytes so that the endpoint can do private key
         * verification. This must be used only if privateKeyFile wasn't set.
         * Otherwise the file will have the priority.
         *
         * The option is a: <code>byte[]</code> type.
         *
         * Group: security
         *
         * @param privateKeyBytes the value to set
         * @return the dsl builder
         */
        default ScpEndpointBuilder privateKeyBytes(byte[] privateKeyBytes) {
            doSetProperty("privateKeyBytes", privateKeyBytes);
            return this;
        }
        /**
         * Set the private key bytes so that the endpoint can do private key
         * verification. This must be used only if privateKeyFile wasn't set.
         * Otherwise the file will have the priority.
         *
         * The option will be converted to a <code>byte[]</code> type.
         *
         * Group: security
         *
         * @param privateKeyBytes the value to set
         * @return the dsl builder
         */
        default ScpEndpointBuilder privateKeyBytes(String privateKeyBytes) {
            // String overload: the value will be converted to byte[] when the endpoint is created.
            doSetProperty("privateKeyBytes", privateKeyBytes);
            return this;
        }
        /**
         * Set the private key file so that the endpoint can do private key
         * verification. You can prefix with classpath: to load the file from
         * classpath instead of file system.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: security
         *
         * @param privateKeyFile the value to set
         * @return the dsl builder
         */
        default ScpEndpointBuilder privateKeyFile(String privateKeyFile) {
            // Takes priority over privateKeyBytes when both are configured (see privateKeyBytes Javadoc).
            doSetProperty("privateKeyFile", privateKeyFile);
            return this;
        }
        /**
         * Set the private key file passphrase so that the endpoint can do
         * private key verification.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: security
         *
         * @param privateKeyFilePassphrase the value to set
         * @return the dsl builder
         */
        default ScpEndpointBuilder privateKeyFilePassphrase(String privateKeyFilePassphrase) {
            doSetProperty("privateKeyFilePassphrase", privateKeyFilePassphrase);
            return this;
        }
        /**
         * Username to use for login.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: security
         *
         * @param username the value to set
         * @return the dsl builder
         */
        default ScpEndpointBuilder username(String username) {
            // Login name for the SSH/SCP connection.
            doSetProperty("username", username);
            return this;
        }
        /**
         * If knownHostsFile has not been explicitly configured, then use the
         * host file from System.getProperty(user.home) /.ssh/known_hosts.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: true
         * Group: security
         *
         * @param useUserKnownHostsFile the value to set
         * @return the dsl builder
         */
        default ScpEndpointBuilder useUserKnownHostsFile(boolean useUserKnownHostsFile) {
            doSetProperty("useUserKnownHostsFile", useUserKnownHostsFile);
            return this;
        }
        /**
         * If knownHostsFile has not been explicitly configured, then use the
         * host file from System.getProperty(user.home) /.ssh/known_hosts.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Default: true
         * Group: security
         *
         * @param useUserKnownHostsFile the value to set
         * @return the dsl builder
         */
        default ScpEndpointBuilder useUserKnownHostsFile(String useUserKnownHostsFile) {
            // String overload: the value is converted to boolean when the endpoint is created.
            doSetProperty("useUserKnownHostsFile", useUserKnownHostsFile);
            return this;
        }
}
/**
* Advanced builder for endpoint for the SCP component.
*/
public
|
ScpEndpointBuilder
|
java
|
quarkusio__quarkus
|
extensions/spring-data-rest/deployment/src/test/java/io/quarkus/spring/data/rest/JpaResourceTest.java
|
{
"start": 959,
"end": 19621
}
|
class ____ {
@RegisterExtension
static final QuarkusUnitTest TEST = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(AbstractEntity.class, Record.class, JpaRecordsRepository.class)
.addAsResource("application.properties")
.addAsResource("import.sql"));
@Test
void shouldGet() {
given().accept("application/json")
.when().get("/jpa-records/1")
.then().statusCode(200)
.and().body("id", is(equalTo(1)))
.and().body("name", is(equalTo("first")));
}
@Test
void shouldNotGetNonExistent() {
given().accept("application/json")
.when().get("/jpa-records/1000")
.then().statusCode(404);
}
@Test
void shouldGetHal() {
given().accept("application/hal+json")
.when().get("/jpa-records/1")
.then().statusCode(200)
.and().body("id", is(equalTo(1)))
.and().body("name", is(equalTo("first")))
.and().body("_links.add.href", endsWith("/jpa-records"))
.and().body("_links.list.href", endsWith("/jpa-records"))
.and().body("_links.self.href", endsWith("/jpa-records/1"))
.and().body("_links.update.href", endsWith("/jpa-records/1"))
.and().body("_links.remove.href", endsWith("/jpa-records/1"));
}
@Test
void shouldNotGetNonExistentHal() {
given().accept("application/hal+json")
.when().get("/jpa-records/1000")
.then().statusCode(404);
}
@Test
void shouldList() {
Response response = given().accept("application/json")
.when().get("/jpa-records")
.thenReturn();
assertThat(response.statusCode()).isEqualTo(200);
assertThat(response.body().jsonPath().getList("id")).contains(1, 2);
assertThat(response.body().jsonPath().getList("name")).contains("first", "second");
Map<String, String> expectedLinks = new HashMap<>(2);
expectedLinks.put("first", "/jpa-records?page=0&size=20");
expectedLinks.put("last", "/jpa-records?page=0&size=20");
assertLinks(response.headers(), expectedLinks);
}
@Test
void shouldListHal() {
given().accept("application/hal+json")
.when().get("/jpa-records")
.then().statusCode(200).log().all()
.and().body("_embedded.jpa-records.id", hasItems(1, 2))
.and().body("_embedded.jpa-records.name", hasItems("first", "second"))
.and()
.body("_embedded.jpa-records._links.add.href",
hasItems(endsWith("/jpa-records"), endsWith("/jpa-records")))
.and()
.body("_embedded.jpa-records._links.list.href",
hasItems(endsWith("/jpa-records"), endsWith("/jpa-records")))
.and()
.body("_embedded.jpa-records._links.self.href",
hasItems(endsWith("/jpa-records/1"), endsWith("/jpa-records/2")))
.and()
.body("_embedded.jpa-records._links.update.href",
hasItems(endsWith("/jpa-records/1"), endsWith("/jpa-records/2")))
.and()
.body("_embedded.jpa-records._links.remove.href",
hasItems(endsWith("/jpa-records/1"), endsWith("/jpa-records/2")))
.and().body("_links.add.href", endsWith("/jpa-records"))
.and().body("_links.list.href", endsWith("/jpa-records"))
.and().body("_links.first.href", endsWith("/jpa-records?page=0&size=20"))
.and().body("_links.last.href", endsWith("/jpa-records?page=0&size=20"));
}
@Test
void shouldListFirstPage() {
Response initResponse = given().accept("application/json")
.when().get("/jpa-records")
.thenReturn();
List<Integer> ids = initResponse.body().jsonPath().getList("id");
List<String> names = initResponse.body().jsonPath().getList("name");
int lastPage = ids.size() - 1;
Response response = given().accept("application/json")
.and().queryParam("page", 0)
.and().queryParam("size", 1)
.when().get("/jpa-records")
.thenReturn();
assertThat(response.statusCode()).isEqualTo(200);
assertThat(response.body().jsonPath().getList("id")).containsOnly(ids.get(0));
assertThat(response.body().jsonPath().getList("name")).containsOnly(names.get(0));
Map<String, String> expectedLinks = new HashMap<>(3);
expectedLinks.put("first", "/jpa-records?page=0&size=1");
expectedLinks.put("last", "/jpa-records?page=" + lastPage + "&size=1");
expectedLinks.put("next", "/jpa-records?page=1&size=1");
assertLinks(response.headers(), expectedLinks);
}
@Test
void shouldListFirstPageHal() {
Response initResponse = given().accept("application/json")
.when().get("/jpa-records")
.thenReturn();
List<Integer> ids = initResponse.body().jsonPath().getList("id");
List<String> names = initResponse.body().jsonPath().getList("name");
int lastPage = ids.size() - 1;
given().accept("application/hal+json")
.and().queryParam("page", 0)
.and().queryParam("size", 1)
.when().get("/jpa-records")
.then().statusCode(200)
.and().body("_embedded.jpa-records.id", contains(ids.get(0)))
.and().body("_embedded.jpa-records.name", contains(names.get(0)))
.and()
.body("_embedded.jpa-records._links.add.href",
hasItems(endsWith("/jpa-records"), endsWith("/jpa-records")))
.and()
.body("_embedded.jpa-records._links.list.href",
hasItems(endsWith("/jpa-records"), endsWith("/jpa-records")))
.and()
.body("_embedded.jpa-records._links.self.href",
contains(endsWith("/jpa-records/" + ids.get(0))))
.and()
.body("_embedded.jpa-records._links.update.href",
contains(endsWith("/jpa-records/" + ids.get(0))))
.and()
.body("_embedded.jpa-records._links.remove.href",
contains(endsWith("/jpa-records/" + ids.get(0))))
.and().body("_links.add.href", endsWith("/jpa-records"))
.and().body("_links.list.href", endsWith("/jpa-records"))
.and().body("_links.first.href", endsWith("/jpa-records?page=0&size=1"))
.and().body("_links.last.href", endsWith("/jpa-records?page=" + lastPage + "&size=1"))
.and().body("_links.next.href", endsWith("/jpa-records?page=1&size=1"));
}
@Test
void shouldListLastPage() {
Response initResponse = given().accept("application/json")
.when().get("/jpa-records")
.thenReturn();
List<Integer> ids = initResponse.body().jsonPath().getList("id");
List<String> names = initResponse.body().jsonPath().getList("name");
int lastPage = ids.size() - 1;
Response response = given().accept("application/json")
.and().queryParam("page", lastPage)
.and().queryParam("size", 1)
.when().get("/jpa-records")
.thenReturn();
assertThat(response.statusCode()).isEqualTo(200);
assertThat(response.body().jsonPath().getList("id")).containsOnly(ids.get(lastPage));
assertThat(response.body().jsonPath().getList("name")).containsOnly(names.get(lastPage));
Map<String, String> expectedLinks = new HashMap<>(3);
expectedLinks.put("first", "/jpa-records?page=0&size=1");
expectedLinks.put("last", "/jpa-records?page=" + lastPage + "&size=1");
expectedLinks.put("previous", "/jpa-records?page=" + (lastPage - 1) + "&size=1");
assertLinks(response.headers(), expectedLinks);
}
@Test
void shouldListLastPageHal() {
Response initResponse = given().accept("application/json")
.when().get("/jpa-records")
.thenReturn();
List<Integer> ids = initResponse.body().jsonPath().getList("id");
List<String> names = initResponse.body().jsonPath().getList("name");
int lastPage = ids.size() - 1;
given().accept("application/hal+json")
.and().queryParam("page", lastPage)
.and().queryParam("size", 1)
.when().get("/jpa-records")
.then().statusCode(200)
.and().body("_embedded.jpa-records.id", contains(ids.get(lastPage)))
.and().body("_embedded.jpa-records.name", contains(names.get(lastPage)))
.and()
.body("_embedded.jpa-records._links.add.href",
hasItems(endsWith("/jpa-records"), endsWith("/jpa-records")))
.and()
.body("_embedded.jpa-records._links.list.href",
hasItems(endsWith("/jpa-records"), endsWith("/jpa-records")))
.and()
.body("_embedded.jpa-records._links.self.href",
contains(endsWith("/jpa-records/" + ids.get(lastPage))))
.and()
.body("_embedded.jpa-records._links.update.href",
contains(endsWith("/jpa-records/" + ids.get(lastPage))))
.and()
.body("_embedded.jpa-records._links.remove.href",
contains(endsWith("/jpa-records/" + ids.get(lastPage))))
.and().body("_links.add.href", endsWith("/jpa-records"))
.and().body("_links.list.href", endsWith("/jpa-records"))
.and().body("_links.first.href", endsWith("/jpa-records?page=0&size=1"))
.and().body("_links.last.href", endsWith("/jpa-records?page=" + lastPage + "&size=1"))
.and().body("_links.previous.href", endsWith("/jpa-records?page=" + (lastPage - 1) + "&size=1"));
}
@Test
void shouldNotGetNonExistentPage() {
given().accept("application/json")
.and().queryParam("page", 100)
.when().get("/jpa-records")
.then().statusCode(200)
.and().body("id", is(empty()));
}
@Test
void shouldNotGetNegativePageOrSize() {
given().accept("application/json")
.and().queryParam("page", -1)
.and().queryParam("size", -1)
.when().get("/jpa-records")
.then().statusCode(200)
// Invalid page and size parameters are replaced with defaults
.and().body("id", hasItems(1, 2));
}
@Test
void shouldListAscending() {
Response response = given().accept("application/json")
.when().get("/jpa-records?sort=name,id")
.thenReturn();
List<String> actualNames = response.body().jsonPath().getList("name");
List<String> expectedNames = new LinkedList<>(actualNames);
expectedNames.sort(Comparator.naturalOrder());
assertThat(actualNames).isEqualTo(expectedNames);
}
@Test
void shouldListDescending() {
Response response = given().accept("application/json")
.when().get("/jpa-records?sort=-name,id")
.thenReturn();
List<String> actualNames = response.body().jsonPath().getList("name");
List<String> expectedNames = new LinkedList<>(actualNames);
expectedNames.sort(Comparator.reverseOrder());
assertThat(actualNames).isEqualTo(expectedNames);
}
@Test
void shouldCreate() {
Response response = given().accept("application/json")
.and().contentType("application/json")
.and().body("{\"name\": \"test-create\"}")
.when().post("/jpa-records")
.thenReturn();
assertThat(response.statusCode()).isEqualTo(201);
String location = response.header("Location");
int id = Integer.parseInt(location.substring(response.header("Location").lastIndexOf("/") + 1));
JsonPath body = response.body().jsonPath();
assertThat(body.getInt("id")).isEqualTo(id);
assertThat(body.getString("name")).isEqualTo("test-create");
given().accept("application/json")
.when().get(location)
.then().statusCode(200)
.and().body("id", is(equalTo(id)))
.and().body("name", is(equalTo("test-create")));
}
@Test
void shouldCreateHal() {
Response response = given().accept("application/hal+json")
.and().contentType("application/json")
.and().body("{\"name\": \"test-create-hal\"}")
.when().post("/jpa-records")
.thenReturn();
assertThat(response.statusCode()).isEqualTo(201);
String location = response.header("Location");
int id = Integer.parseInt(location.substring(response.header("Location").lastIndexOf("/") + 1));
JsonPath body = response.body().jsonPath();
assertThat(body.getInt("id")).isEqualTo(id);
assertThat(body.getString("name")).isEqualTo("test-create-hal");
assertThat(body.getString("_links.add.href")).endsWith("/jpa-records");
assertThat(body.getString("_links.list.href")).endsWith("/jpa-records");
assertThat(body.getString("_links.self.href")).endsWith("/jpa-records/" + id);
assertThat(body.getString("_links.update.href")).endsWith("/jpa-records/" + id);
assertThat(body.getString("_links.remove.href")).endsWith("/jpa-records/" + id);
given().accept("application/json")
.when().get(location)
.then().statusCode(200)
.and().body("id", is(equalTo(id)))
.and().body("name", is(equalTo("test-create-hal")));
}
@Test
void shouldCreateAndUpdate() {
Response createResponse = given().accept("application/json")
.and().contentType("application/json")
.and().body("{\"name\": \"test-update-create\"}")
.when().post("/jpa-records/")
.thenReturn();
assertThat(createResponse.statusCode()).isEqualTo(201);
String location = createResponse.header("Location");
int id = Integer.parseInt(location.substring(createResponse.header("Location").lastIndexOf("/") + 1));
JsonPath body = createResponse.body().jsonPath();
assertThat(body.getInt("id")).isEqualTo(id);
assertThat(body.getString("name")).isEqualTo("test-update-create");
given().accept("application/json")
.and().contentType("application/json")
.and().body("{\"id\": \"" + id + "\", \"name\": \"test-update\"}")
.when().put(location)
.then()
.statusCode(204);
given().accept("application/json")
.when().get(location)
.then().statusCode(200)
.and().body("id", is(equalTo(id)))
.and().body("name", is(equalTo("test-update")));
}
@Test
void shouldCreateAndUpdateHal() {
Response createResponse = given().accept("application/hal+json")
.and().contentType("application/json")
.and().body("{\"name\": \"test-update-create-hal\"}")
.when().post("/jpa-records/")
.thenReturn();
assertThat(createResponse.statusCode()).isEqualTo(201);
String location = createResponse.header("Location");
int id = Integer.parseInt(location.substring(createResponse.header("Location").lastIndexOf("/") + 1));
JsonPath body = createResponse.body().jsonPath();
assertThat(body.getInt("id")).isEqualTo(id);
assertThat(body.getString("name")).isEqualTo("test-update-create-hal");
assertThat(body.getString("_links.add.href")).endsWith("/jpa-records");
assertThat(body.getString("_links.list.href")).endsWith("/jpa-records");
assertThat(body.getString("_links.self.href")).endsWith("/jpa-records/" + id);
assertThat(body.getString("_links.update.href")).endsWith("/jpa-records/" + id);
assertThat(body.getString("_links.remove.href")).endsWith("/jpa-records/" + id);
given().accept("application/json")
.and().contentType("application/json")
.and().body("{\"id\": \"" + id + "\", \"name\": \"test-update-hal\"}")
.when().put(location)
.then()
.statusCode(204);
given().accept("application/json")
.when().get(location)
.then().statusCode(200)
.and().body("id", is(equalTo(id)))
.and().body("name", is(equalTo("test-update-hal")));
}
@Test
void shouldCreateAndDelete() {
Response createResponse = given().accept("application/json")
.and().contentType("application/json")
.and().body("{\"name\": \"test-delete\"}")
.when().post("/jpa-records")
.thenReturn();
assertThat(createResponse.statusCode()).isEqualTo(201);
String location = createResponse.header("Location");
when().delete(location)
.then().statusCode(204);
when().get(location)
.then().statusCode(404);
}
@Test
void shouldNotDeleteNonExistent() {
when().delete("/jpa-records/1000")
.then().statusCode(404);
}
    // Asserts that the response carries exactly the expected pagination links:
    // same number of "Link" headers, and for each expected rel there is a link
    // whose URI ends with the expected path suffix.
    private void assertLinks(Headers headers, Map<String, String> expectedLinks) {
        // Parse every "Link" response header into a JAX-RS Link object.
        List<Link> links = new LinkedList<>();
        for (Header header : headers.getList("Link")) {
            links.add(Link.valueOf(header.getValue()));
        }
        assertThat(links).hasSize(expectedLinks.size());
        for (Map.Entry<String, String> expectedLink : expectedLinks.entrySet()) {
            assertThat(links).anySatisfy(link -> {
                assertThat(link.getUri().toString()).endsWith(expectedLink.getValue());
                assertThat(link.getRel()).isEqualTo(expectedLink.getKey());
            });
        }
    }
}
|
JpaResourceTest
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoderBenchmark.java
|
{
"start": 1971,
"end": 2598
}
|
class ____ {
private RawErasureCoderBenchmark() {
// prevent instantiation
}
// target size of input data buffer
private static final int TARGET_BUFFER_SIZE_MB = 126;
private static final int MAX_CHUNK_SIZE =
TARGET_BUFFER_SIZE_MB / BenchData.NUM_DATA_UNITS * 1024;
private static final List<RawErasureCoderFactory> CODER_MAKERS =
Collections.unmodifiableList(
Arrays.asList(new DummyRawErasureCoderFactory(),
new RSLegacyRawErasureCoderFactory(),
new RSRawErasureCoderFactory(),
new NativeRSRawErasureCoderFactory()));
|
RawErasureCoderBenchmark
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FlagSet.java
|
{
"start": 7747,
"end": 8332
}
|
enum ____
* @return a mutable FlagSet
*/
@SafeVarargs
public static <E extends Enum<E>> FlagSet<E> createFlagSet(
final Class<E> enumClass,
final String prefix,
final E... enabled) {
final FlagSet<E> flagSet = new FlagSet<>(enumClass, prefix, null);
Arrays.stream(enabled).forEach(flag -> {
if (flag != null) {
flagSet.enable(flag);
}
});
return flagSet;
}
/**
* Build a FlagSet from a comma separated list of values.
* Case independent.
* Special handling of "*" meaning: all values.
* @param enumClass
|
type
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/test/groovy/io/micronaut/inject/qualifiers/multiple/MultipleCompositeQualifierSpec.java
|
{
"start": 1344,
"end": 2698
}
|
class ____ {
    // Field injection: processor selected by the custom @XPayBy qualifier with CREDIT_CARD.
    @Inject
    @XPayBy(XPaymentMethod.CREDIT_CARD)
    MoneyProcessor creditCartProcessor1;
    // Selected by a single marker qualifier.
    @Inject
    @RequiresSignature
    MoneyProcessor creditCartProcessor2;
    // Selected by a composite of two qualifiers; both must match the candidate bean.
    @Inject
    @RequiresSignature
    @Credit
    MoneyProcessor creditCartProcessor3;
    @Inject
    @XPayBy(XPaymentMethod.TRANSFER)
    MoneyProcessor bankTransferProcessor1;
    // Constructor-injected counterparts of the fields above, using the same qualifiers,
    // so the test can compare field vs constructor qualifier resolution.
    final MoneyProcessor fromCtorCreditCartProcessor1;
    final MoneyProcessor fromCtorCreditCartProcessor2;
    final MoneyProcessor fromCtorCreditCartProcessor3;
    final MoneyProcessor fromCtorBankTransferProcessor1;
    XMyBean(
        @XPayBy(XPaymentMethod.CREDIT_CARD)
        MoneyProcessor fromCtorCreditCartProcessor1,
        @RequiresSignature
        MoneyProcessor fromCtorCreditCartProcessor2,
        @RequiresSignature
        @Credit
        MoneyProcessor fromCtorCreditCartProcessor3,
        @XPayBy(XPaymentMethod.TRANSFER)
        MoneyProcessor fromCtorBankTransferProcessor1) {
        this.fromCtorCreditCartProcessor1 = fromCtorCreditCartProcessor1;
        this.fromCtorCreditCartProcessor2 = fromCtorCreditCartProcessor2;
        this.fromCtorCreditCartProcessor3 = fromCtorCreditCartProcessor3;
        this.fromCtorBankTransferProcessor1 = fromCtorBankTransferProcessor1;
    }
}
|
XMyBean
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/subresource/SubResourceTest.java
|
{
"start": 7473,
"end": 10050
}
|
class ____$RootClient$$QuarkusRestClientInterface implements Closeable, RootClient {
final WebTarget target1_1;
final WebTarget target1_2;
final WebTarget target1_3;
public SubResourceTest$RootClient$$QuarkusRestClientInterface(WebTarget var1) {
WebTarget var3 = var1.path("/path/{rootParam}");
DefaultClientHeadersFactoryImpl var2 = new DefaultClientHeadersFactoryImpl();
MicroProfileRestClientRequestFilter var4 = new MicroProfileRestClientRequestFilter((ClientHeadersFactory)var2);
var3 = (WebTarget)((Configurable)var3).register(var4);
String var6 = "/{methodParam}";
WebTarget var5 = var3.path(var6);
String var7 = "";
var5 = var5.path(var7);
this.target1_1 = var5;
String var9 = "/{methodParam}";
WebTarget var8 = var3.path(var9);
String var10 = "/simple";
var8 = var8.path(var10);
this.target1_2 = var8;
String var12 = "/{methodParam}";
WebTarget var11 = var3.path(var12);
String var13 = "/sub";
var11 = var11.path(var13);
this.target1_3 = var11;
}
public SubClient sub(String var1, String var2) {
SubResourceTest$SubCliented77e297b94a7e0aa21c1f7f1d8ba4fbe72d61861 var3 = new SubResourceTest$SubCliented77e297b94a7e0aa21c1f7f1d8ba4fbe72d61861();
var3.param0 = var1;
var3.param1 = var2;
WebTarget var4 = this.target1_1;
var3.target1 = var4;
WebTarget var5 = this.target1_2;
var3.target2 = var5;
WebTarget var6 = this.target1_3;
var3.target3 = var6;
return (SubClient)var3;
}
public void close() {
((WebTargetImpl)this.target1_1).getRestClient().close();
((WebTargetImpl)this.target1_2).getRestClient().close();
((WebTargetImpl)this.target1_3).getRestClient().close();
((WebTargetImpl)((SubClientf48b9cee6dde6b96b184ff11e432714265b0c2161)this).target3_1).getRestClient().close();
}
}
========================================================
SubClient:
========================================================
package io.quarkus.rest.client.reactive.subresource;
import io.quarkus.rest.client.reactive.HeaderFiller;
import io.quarkus.rest.client.reactive.subresource.SubResourceTest.SubClient;
import java.lang.reflect.Method;
import jakarta.ws.rs.ProcessingException;
import jakarta.ws.rs.WebApplicationException;
import jakarta.ws.rs.client.Entity;
import jakarta.ws.rs.client.WebTarget;
import jakarta.ws.rs.client.Invocation.Builder;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
// $FF: synthetic
|
SubResourceTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeSearchContextByShardId.java
|
{
"start": 3156,
"end": 4066
}
|
// View over the sub-range [from, to) of a backing array, addressed by shard id.
class ____<T> implements IndexedByShardId<T> {
    private final T[] array;
    // Inclusive lower bound of the exposed shard-id range.
    private final int from;
    // Exclusive upper bound of the exposed shard-id range.
    private final int to;
    SubRanged(T[] array, int from, int to) {
        this.array = array;
        this.from = from;
        this.to = to;
    }
    @Override
    public T get(int shardId) {
        // The shard id doubles as the array index; ids outside [from, to) are rejected.
        if (shardId < from || shardId >= to) {
            throw new IndexOutOfBoundsException("shardId " + shardId + " out of bounds [" + from + ", " + to + ")");
        }
        return array[shardId];
    }
    @Override
    public Collection<? extends T> collection() {
        // Returns a view (not a copy) of the exposed sub-range.
        return Arrays.asList(array).subList(from, to);
    }
    @Override
    public <S> IndexedByShardId<S> map(Function<T, S> mapper) {
        // Lazily mapped view over the same range: size (to - from), offset `from`.
        // NOTE(review): exact semantics depend on Mapped's constructor contract — confirm argument order.
        return new Mapped<>(this, to - from, from, mapper);
    }
}
private static
|
SubRanged
|
java
|
apache__camel
|
components/camel-jetty/src/test/java/org/apache/camel/component/jetty/rest/RestJettyInvalidJSonClientResponseValidationTest.java
|
{
"start": 1440,
"end": 3142
}
|
// Verifies that Rest DSL client response validation rejects an invalid JSON
// response body with HTTP 500 and a descriptive error message.
class ____ extends BaseJettyTest {
    @Test
    public void testJettyInvalidJSon() {
        FluentProducerTemplate requestTemplate = fluentTemplate.withHeader(Exchange.CONTENT_TYPE, "application/json")
                .withHeader(Exchange.HTTP_METHOD, "post")
                .withBody("{\"name\": \"Donald\"}") // the body is ok
                .to("http://localhost:" + getPort() + "/users/123/update");
        // The service returns malformed JSON, so response validation must fail the exchange.
        Exception ex = assertThrows(CamelExecutionException.class, () -> requestTemplate.request(String.class));
        HttpOperationFailedException cause = assertIsInstanceOf(HttpOperationFailedException.class, ex.getCause());
        assertEquals(500, cause.getStatusCode());
        assertEquals("Invalid response JSon payload.", cause.getResponseBody());
    }
    @Override
    protected RouteBuilder createRouteBuilder() {
        return new RouteBuilder() {
            @Override
            public void configure() {
                // configure to use jetty on localhost with the given port
                restConfiguration().component("jetty").host("localhost").port(getPort())
                        .bindingMode(RestBindingMode.json)
                        // turn on response validation
                        .clientResponseValidation(true);
                // use the rest DSL to define the rest services
                rest("/users/").post("{id}/update")
                        .consumes("application/json").produces("application/json")
                        .to("direct:update");
                from("direct:update").setBody(constant("{ \"status\": \"ok\"")); // this json is invalid
            }
        };
    }
}
|
RestJettyInvalidJSonClientResponseValidationTest
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/issue_1100/Issue1177_3.java
|
{
"start": 329,
"end": 857
}
|
class ____ extends TestCase {
public void test_for_issue() throws Exception {
String text = "[{\"x\":\"y\"},{\"x\":\"y\"}]";
List<Model> jsonObject = JSONObject.parseObject(text, new TypeReference<List<Model>>(){});
System.out.println(JSON.toJSONString(jsonObject));
String jsonpath = "$..x";
String value="y2";
JSONPath.set(jsonObject, jsonpath, value);
assertEquals("[{\"x\":\"y2\"},{\"x\":\"y2\"}]", JSON.toJSONString(jsonObject));
}
public static
|
Issue1177_3
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/inlineme/SuggesterTest.java
|
{
"start": 988,
"end": 2161
}
|
class ____ {
private final BugCheckerRefactoringTestHelper refactoringTestHelper =
BugCheckerRefactoringTestHelper.newInstance(Suggester.class, getClass());
@Test
public void buildAnnotation_withImports() {
assertThat(
InlineMeData.buildAnnotation(
"REPLACEMENT",
ImmutableSet.of("java.time.Duration", "java.time.Instant"),
ImmutableSet.of()))
.isEqualTo(
"@InlineMe(replacement = \"REPLACEMENT\", "
+ "imports = {\"java.time.Duration\", \"java.time.Instant\"})\n");
}
@Test
public void buildAnnotation_withSingleImport() {
assertThat(
InlineMeData.buildAnnotation(
"REPLACEMENT", ImmutableSet.of("java.time.Duration"), ImmutableSet.of()))
.isEqualTo(
"@InlineMe(replacement = \"REPLACEMENT\", " + "imports = \"java.time.Duration\")\n");
}
@Test
public void instanceMethodNewImport() {
refactoringTestHelper
.addInputLines(
"Client.java",
"""
package com.google.frobber;
import java.time.Duration;
public final
|
SuggesterTest
|
java
|
google__guava
|
android/guava/src/com/google/common/graph/PredecessorsFunction.java
|
{
"start": 752,
"end": 879
}
|
interface ____ <a
* href="https://en.wikipedia.org/wiki/Graph_(discrete_mathematics)">graph</a>-structured data.
*
* <p>This
|
for
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/kstream/internals/suppress/StrictBufferConfigImpl.java
|
{
"start": 1097,
"end": 4284
}
|
/**
 * Immutable "strict" suppression buffer configuration: the buffer never emits
 * early, and {@link #bufferFullStrategy()} decides what happens when the
 * record/byte limits are exceeded. All {@code with*} methods return new
 * instances; this class is never mutated in place.
 */
class ____ extends BufferConfigInternal<Suppressed.StrictBufferConfig> implements Suppressed.StrictBufferConfig {
    private final long maxRecords;
    private final long maxBytes;
    private final BufferFullStrategy bufferFullStrategy;
    // null encodes "changelogging disabled"; an empty map means enabled with default topic config
    // (see isLoggingEnabled / withLoggingDisabled).
    private final Map<String, String> logConfig;
    public StrictBufferConfigImpl(final long maxRecords,
                                  final long maxBytes,
                                  final BufferFullStrategy bufferFullStrategy,
                                  final Map<String, String> logConfig) {
        this.maxRecords = maxRecords;
        this.maxBytes = maxBytes;
        this.bufferFullStrategy = bufferFullStrategy;
        this.logConfig = logConfig;
    }
    // Default configuration: effectively unbounded buffer that shuts down if ever full,
    // with changelogging enabled using default topic config.
    public StrictBufferConfigImpl() {
        this.maxRecords = Long.MAX_VALUE;
        this.maxBytes = Long.MAX_VALUE;
        this.bufferFullStrategy = SHUT_DOWN;
        this.logConfig = Collections.emptyMap();
    }
    @Override
    public Suppressed.StrictBufferConfig withMaxRecords(final long recordLimit) {
        return new StrictBufferConfigImpl(recordLimit, maxBytes, bufferFullStrategy, logConfig());
    }
    @Override
    public Suppressed.StrictBufferConfig withMaxBytes(final long byteLimit) {
        return new StrictBufferConfigImpl(maxRecords, byteLimit, bufferFullStrategy, logConfig());
    }
    @Override
    public long maxRecords() {
        return maxRecords;
    }
    @Override
    public long maxBytes() {
        return maxBytes;
    }
    @Override
    public BufferFullStrategy bufferFullStrategy() {
        return bufferFullStrategy;
    }
    @Override
    public Suppressed.StrictBufferConfig withLoggingDisabled() {
        // null logConfig is the sentinel for "logging disabled".
        return new StrictBufferConfigImpl(maxRecords, maxBytes, bufferFullStrategy, null);
    }
    @Override
    public Suppressed.StrictBufferConfig withLoggingEnabled(final Map<String, String> config) {
        return new StrictBufferConfigImpl(maxRecords, maxBytes, bufferFullStrategy, config);
    }
    @Override
    public boolean isLoggingEnabled() {
        return logConfig != null;
    }
    @Override
    public Map<String, String> logConfig() {
        return isLoggingEnabled() ? logConfig : Collections.emptyMap();
    }
    @Override
    public boolean equals(final Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        final StrictBufferConfigImpl that = (StrictBufferConfigImpl) o;
        // Compare via logConfig() (not the raw field) so that a null config and an
        // explicit empty map compare... differently: null vs empty differ in
        // isLoggingEnabled() but both normalize to emptyMap here — intentional per original.
        return maxRecords == that.maxRecords &&
            maxBytes == that.maxBytes &&
            bufferFullStrategy == that.bufferFullStrategy &&
            Objects.equals(logConfig(), that.logConfig());
    }
    @Override
    public int hashCode() {
        return Objects.hash(maxRecords, maxBytes, bufferFullStrategy, logConfig());
    }
    @Override
    public String toString() {
        // NOTE(review): the "maxKeys" label does not match the maxRecords field name;
        // kept as-is since the rendered string may be relied upon — confirm before renaming.
        return "StrictBufferConfigImpl{maxKeys=" + maxRecords +
            ", maxBytes=" + maxBytes +
            ", bufferFullStrategy=" + bufferFullStrategy +
            ", logConfig=" + logConfig().toString() +
            '}';
    }
}
|
StrictBufferConfigImpl
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/type/descriptor/jdbc/ObjectNullAsNullTypeJdbcType.java
|
{
"start": 563,
"end": 1849
}
|
/**
 * Variant of {@code ObjectJdbcType} that binds null values with
 * {@code Types.NULL} rather than the configured JDBC type code, for drivers
 * that reject {@code setNull} with {@code JAVA_OBJECT}.
 */
class ____ extends ObjectJdbcType {
	/**
	 * Singleton access
	 */
	public static final ObjectNullAsNullTypeJdbcType INSTANCE = new ObjectNullAsNullTypeJdbcType( Types.JAVA_OBJECT );
	public ObjectNullAsNullTypeJdbcType(int jdbcTypeCode) {
		super( jdbcTypeCode );
	}
	@Override
	public <X> ValueBinder<X> getBinder(JavaType<X> javaType) {
		// Serializable Java types are bound as serialized bytes via the varbinary binder.
		if ( Serializable.class.isAssignableFrom( javaType.getJavaTypeClass() ) ) {
			return VarbinaryJdbcType.INSTANCE.getBinder( javaType );
		}
		return new BasicBinder<>( javaType, this ) {
			@Override
			protected void doBindNull(PreparedStatement st, int index, WrapperOptions options)
					throws SQLException {
				// Key difference from the parent type: nulls use Types.NULL, not getJdbcTypeCode().
				st.setNull( index, Types.NULL );
			}
			@Override
			protected void doBindNull(CallableStatement st, String name, WrapperOptions options)
					throws SQLException {
				st.setNull( name, Types.NULL );
			}
			@Override
			protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options)
					throws SQLException {
				// Non-null values still bind with the configured JDBC type code.
				st.setObject( index, value, getJdbcTypeCode() );
			}
			@Override
			protected void doBind(CallableStatement st, X value, String name, WrapperOptions options)
					throws SQLException {
				st.setObject( name, value, getJdbcTypeCode() );
			}
		};
	}
}
|
ObjectNullAsNullTypeJdbcType
|
java
|
apache__camel
|
core/camel-main/src/main/java/org/apache/camel/main/MainConfigurationProperties.java
|
{
"start": 35935,
"end": 37362
}
|
class ____ the known list of configurations classes.
*/
@SuppressWarnings("unchecked")
public MainConfigurationProperties withConfigurations(
Class<? extends CamelConfiguration>... configuration) {
addConfigurationClass(configuration);
return this;
}
/**
* Sets the configuration objects used to configure the camel context.
*/
public MainConfigurationProperties withConfigurations(List<CamelConfiguration> configurations) {
setConfigurations(configurations);
return this;
}
// fluent builder - routes builders
// --------------------------------------------------------------
/**
* Sets classes names that implement {@link RoutesBuilder}.
*/
public MainConfigurationProperties withRoutesBuilderClasses(String builders) {
setRoutesBuilderClasses(builders);
return this;
}
/**
* Sets the RoutesBuilder instances.
*/
public MainConfigurationProperties withRoutesBuilders(List<RoutesBuilder> builders) {
setRoutesBuilders(builders);
return this;
}
/**
* Add an additional {@link RoutesBuilder} object to the known list of builders.
*/
public MainConfigurationProperties withAdditionalRoutesBuilder(RoutesBuilder builder) {
addRoutesBuilder(builder);
return this;
}
/**
* Add an additional {@link RoutesBuilder}
|
to
|
java
|
apache__dubbo
|
dubbo-metadata/dubbo-metadata-api/src/main/java/org/apache/dubbo/metadata/OpenAPIRequestOrBuilder.java
|
{
"start": 845,
"end": 5181
}
|
interface ____
extends
// @@protoc_insertion_point(interface_extends:org.apache.dubbo.metadata.OpenAPIRequest)
com.google.protobuf.MessageOrBuilder {
/**
* <pre>
* The openAPI group.
* </pre>
*
* <code>string group = 1;</code>
* @return The group.
*/
String getGroup();
/**
* <pre>
* The openAPI group.
* </pre>
*
* <code>string group = 1;</code>
* @return The bytes for group.
*/
com.google.protobuf.ByteString getGroupBytes();
/**
* <pre>
* The openAPI version, using a major.minor.patch versioning scheme
* e.g. 1.0.1
* </pre>
*
* <code>string version = 2;</code>
* @return The version.
*/
String getVersion();
/**
* <pre>
* The openAPI version, using a major.minor.patch versioning scheme
* e.g. 1.0.1
* </pre>
*
* <code>string version = 2;</code>
* @return The bytes for version.
*/
com.google.protobuf.ByteString getVersionBytes();
/**
* <pre>
* The openAPI tags. Each tag is an or condition.
* </pre>
*
* <code>repeated string tag = 3;</code>
* @return A list containing the tag.
*/
java.util.List<String> getTagList();
/**
* <pre>
* The openAPI tags. Each tag is an or condition.
* </pre>
*
* <code>repeated string tag = 3;</code>
* @return The count of tag.
*/
int getTagCount();
/**
* <pre>
* The openAPI tags. Each tag is an or condition.
* </pre>
*
* <code>repeated string tag = 3;</code>
* @param index The index of the element to return.
* @return The tag at the given index.
*/
String getTag(int index);
/**
* <pre>
* The openAPI tags. Each tag is an or condition.
* </pre>
*
* <code>repeated string tag = 3;</code>
* @param index The index of the value to return.
* @return The bytes of the tag at the given index.
*/
com.google.protobuf.ByteString getTagBytes(int index);
/**
* <pre>
* The openAPI services. Each service is an or condition.
* </pre>
*
* <code>repeated string service = 4;</code>
* @return A list containing the service.
*/
java.util.List<String> getServiceList();
/**
* <pre>
* The openAPI services. Each service is an or condition.
* </pre>
*
* <code>repeated string service = 4;</code>
* @return The count of service.
*/
int getServiceCount();
/**
* <pre>
* The openAPI services. Each service is an or condition.
* </pre>
*
* <code>repeated string service = 4;</code>
* @param index The index of the element to return.
* @return The service at the given index.
*/
String getService(int index);
/**
* <pre>
* The openAPI services. Each service is an or condition.
* </pre>
*
* <code>repeated string service = 4;</code>
* @param index The index of the value to return.
* @return The bytes of the service at the given index.
*/
com.google.protobuf.ByteString getServiceBytes(int index);
/**
* <pre>
* The openAPI specification version, using a major.minor.patch versioning scheme
* e.g. 3.0.1, 3.1.0
* The default value is '3.0.1'.
* </pre>
*
* <code>string openapi = 5;</code>
* @return The openapi.
*/
String getOpenapi();
/**
* <pre>
* The openAPI specification version, using a major.minor.patch versioning scheme
* e.g. 3.0.1, 3.1.0
* The default value is '3.0.1'.
* </pre>
*
* <code>string openapi = 5;</code>
* @return The bytes for openapi.
*/
com.google.protobuf.ByteString getOpenapiBytes();
/**
* <pre>
* The format of the response.
* The default value is 'JSON'.
* </pre>
*
* <code>optional .org.apache.dubbo.metadata.OpenAPIFormat format = 6;</code>
* @return Whether the format field is set.
*/
boolean hasFormat();
/**
* <pre>
* The format of the response.
* The default value is 'JSON'.
* </pre>
*
* <code>optional .org.apache.dubbo.metadata.OpenAPIFormat format = 6;</code>
* @return The
|
OpenAPIRequestOrBuilder
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/kotlin/KspComponentProcessorTest.java
|
{
"start": 871,
"end": 1197
}
|
class ____ {
@Test
public void emptyComponentTest() throws Exception {
Source componentSrc =
CompilerTests.kotlinSource(
"MyComponent.kt",
"package test",
"",
"import dagger.Component",
"",
"@Component",
"
|
KspComponentProcessorTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/StatementSwitchToExpressionSwitchTest.java
|
{
"start": 118280,
"end": 119504
}
|
class ____ {
int x;
public Test(int foo) {
x = -1;
}
public int foo(Suit suit) {
before:
for (; ; ) {
switch (suit) {
case HEART:
x = 2;
break;
case DIAMOND:
x = (((x + 1) * (x * x)) << 1);
break;
case SPADE:
break before;
default:
throw new NullPointerException();
}
break;
}
after:
return x;
}
}
""")
.setArgs(
"-XepOpt:StatementSwitchToExpressionSwitch:EnableAssignmentSwitchConversion",
"-XepOpt:StatementSwitchToExpressionSwitch:EnableDirectConversion=false")
.doTest();
}
@Test
public void switchByEnum_assignmentLabelledBreak2_noError() {
// Can't convert because of "break before" as the second statement in its block
helper
.addSourceLines(
"Test.java",
"""
|
Test
|
java
|
apache__camel
|
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/ControlbusComponentBuilderFactory.java
|
{
"start": 1847,
"end": 3965
}
|
interface ____ extends ComponentBuilder<ControlBusComponent> {
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default ControlbusComponentBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether autowiring is enabled. This is used for automatic autowiring
* options (the option must be marked as autowired) by looking up in the
* registry to find if there is a single instance of matching type,
* which then gets configured on the component. This can be used for
* automatic configuring JDBC data sources, JMS connection factories,
* AWS Clients, etc.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: advanced
*
* @param autowiredEnabled the value to set
* @return the dsl builder
*/
default ControlbusComponentBuilder autowiredEnabled(boolean autowiredEnabled) {
doSetProperty("autowiredEnabled", autowiredEnabled);
return this;
}
}
|
ControlbusComponentBuilder
|
java
|
apache__flink
|
flink-formats/flink-parquet/src/main/java/org/apache/flink/formats/parquet/protobuf/ParquetProtoWriters.java
|
{
"start": 1931,
"end": 2690
}
|
class ____<T extends Message>
extends ParquetWriter.Builder<T, ParquetProtoWriterBuilder<T>> {
private final Class<T> clazz;
public ParquetProtoWriterBuilder(OutputFile outputFile, Class<T> clazz) {
super(outputFile);
this.clazz = clazz;
}
@Override
protected ParquetProtoWriterBuilder<T> self() {
return this;
}
@Override
protected WriteSupport<T> getWriteSupport(Configuration conf) {
// Use patched implementation compatible with protobuf 4.x
return new PatchedProtoWriteSupport<>(clazz);
}
}
/** Class is not meant to be instantiated. */
private ParquetProtoWriters() {}
}
|
ParquetProtoWriterBuilder
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/component/validator/ValidatorWithResourceResolverRouteTest.java
|
{
"start": 1408,
"end": 4297
}
|
class ____ extends ContextTestSupport {
protected MockEndpoint validEndpoint;
protected MockEndpoint finallyEndpoint;
protected MockEndpoint invalidEndpoint;
@Test
public void testValidMessage() throws Exception {
validEndpoint.expectedMessageCount(1);
finallyEndpoint.expectedMessageCount(1);
invalidEndpoint.expectedMessageCount(0);
template
.sendBody("direct:start",
"<report xmlns='http://foo.com/report' xmlns:rb='http://foo.com/report-base'><author><rb:name>Knuth</rb:name></author><content><rb:chapter><rb:subject></rb:subject>"
+ "<rb:abstract></rb:abstract><rb:body></rb:body></rb:chapter></content></report>");
MockEndpoint.assertIsSatisfied(validEndpoint, invalidEndpoint, finallyEndpoint);
}
@Test
public void testInvalidMessage() throws Exception {
validEndpoint.expectedMessageCount(0);
invalidEndpoint.expectedMessageCount(1);
finallyEndpoint.expectedMessageCount(1);
template.sendBody("direct:start",
"<report xmlns='http://foo.com/report' xmlns:rb='http://foo.com/report-base'><author><rb:name>Knuth</rb:name></author></report>");
MockEndpoint.assertIsSatisfied(validEndpoint, invalidEndpoint, finallyEndpoint);
}
@Override
@BeforeEach
public void setUp() throws Exception {
super.setUp();
validEndpoint = resolveMandatoryEndpoint("mock:valid", MockEndpoint.class);
invalidEndpoint = resolveMandatoryEndpoint("mock:invalid", MockEndpoint.class);
finallyEndpoint = resolveMandatoryEndpoint("mock:finally", MockEndpoint.class);
}
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
// we have to do it here, because we need the context created first
CatalogManager.getStaticManager().setIgnoreMissingProperties(true);
CatalogResolver catalogResolver = new CatalogResolver(true);
URL catalogUrl = ResourceHelper.resolveMandatoryResourceAsUrl(context,
"org/apache/camel/component/validator/catalog.cat");
catalogResolver.getCatalog().parseCatalog(catalogUrl);
LSResourceResolver resourceResolver = new CatalogLSResourceResolver(catalogResolver);
context.getRegistry().bind("resourceResolver", resourceResolver);
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").doTry()
.to("validator:org/apache/camel/component/validator/report.xsd?resourceResolver=#resourceResolver")
.to("mock:valid")
.doCatch(ValidationException.class).to("mock:invalid").doFinally().to("mock:finally").end();
}
};
}
}
|
ValidatorWithResourceResolverRouteTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/generated/GeneratedAnnotationBatchTest.java
|
{
"start": 3370,
"end": 4382
}
|
class ____ {
@Id
@GeneratedValue
private Long id;
private String name;
@Generated(event = INSERT)
@ColumnDefault("1")
private Integer generatedProp;
@CurrentTimestamp(event = { INSERT, UPDATE })
private Instant updateTimestamp;
public GeneratedEntity() {
}
public GeneratedEntity(String name) {
this.name = name;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Integer getGeneratedProp() {
return generatedProp;
}
public Instant getUpdateTimestamp() {
return updateTimestamp;
}
}
private static void waitALittle(SessionFactoryScope scope) {
boolean waitLonger =
// informix clock has low resolution on Mac
scope.getSessionFactory().getJdbcServices().getDialect()
instanceof InformixDialect;
try {
Thread.sleep( waitLonger ? 1_200 : 10 );
}
catch (InterruptedException e) {
throw new HibernateError( "Unexpected wakeup from test sleep" );
}
}
}
|
GeneratedEntity
|
java
|
quarkusio__quarkus
|
extensions/panache/mongodb-panache/runtime/src/main/java/io/quarkus/mongodb/panache/reactive/ReactivePanacheMongoEntity.java
|
{
"start": 666,
"end": 1031
}
|
class ____ extends ReactivePanacheMongoEntityBase {
/**
* The auto-generated ID field.
* This field is set by Mongo when this entity is persisted.
*
* @see #persist()
*/
public ObjectId id;
@Override
public String toString() {
return this.getClass().getSimpleName() + "<" + id + ">";
}
}
|
ReactivePanacheMongoEntity
|
java
|
spring-projects__spring-framework
|
spring-jdbc/src/test/java/org/springframework/jdbc/core/support/SqlCharacterValueTests.java
|
{
"start": 1271,
"end": 4309
}
|
class ____ {
@Test
void withString() throws SQLException {
String content = "abc";
SqlCharacterValue value = new SqlCharacterValue(content);
PreparedStatement ps = mock();
value.setTypeValue(ps, 1, JdbcUtils.TYPE_UNKNOWN, null);
verify(ps).setString(1, content);
}
@Test
void withStringForClob() throws SQLException {
String content = "abc";
SqlCharacterValue value = new SqlCharacterValue(content);
PreparedStatement ps = mock();
value.setTypeValue(ps, 1, Types.CLOB, null);
verify(ps).setClob(eq(1), any(StringReader.class), eq(3L));
}
@Test
void withStringForNClob() throws SQLException {
String content = "abc";
SqlCharacterValue value = new SqlCharacterValue(content);
PreparedStatement ps = mock();
value.setTypeValue(ps, 1, Types.NCLOB, null);
verify(ps).setNClob(eq(1), any(StringReader.class), eq(3L));
}
@Test
void withCharArray() throws SQLException {
char[] content = "abc".toCharArray();
SqlCharacterValue value = new SqlCharacterValue(content);
PreparedStatement ps = mock();
value.setTypeValue(ps, 1, JdbcUtils.TYPE_UNKNOWN, null);
verify(ps).setCharacterStream(eq(1), any(CharArrayReader.class), eq(3L));
}
@Test
void withCharArrayForClob() throws SQLException {
char[] content = "abc".toCharArray();
SqlCharacterValue value = new SqlCharacterValue(content);
PreparedStatement ps = mock();
value.setTypeValue(ps, 1, Types.CLOB, null);
verify(ps).setClob(eq(1), any(CharArrayReader.class), eq(3L));
}
@Test
void withCharArrayForNClob() throws SQLException {
char[] content = "abc".toCharArray();
SqlCharacterValue value = new SqlCharacterValue(content);
PreparedStatement ps = mock();
value.setTypeValue(ps, 1, Types.NCLOB, null);
verify(ps).setNClob(eq(1), any(CharArrayReader.class), eq(3L));
}
@Test
void withReader() throws SQLException {
Reader content = new StringReader("abc");
SqlCharacterValue value = new SqlCharacterValue(content, 3);
PreparedStatement ps = mock();
value.setTypeValue(ps, 1, JdbcUtils.TYPE_UNKNOWN, null);
verify(ps).setCharacterStream(1, content, 3L);
}
@Test
void withReaderForClob() throws SQLException {
Reader content = new StringReader("abc");
SqlCharacterValue value = new SqlCharacterValue(content, 3);
PreparedStatement ps = mock();
value.setTypeValue(ps, 1, Types.CLOB, null);
verify(ps).setClob(1, content, 3L);
}
@Test
void withReaderForNClob() throws SQLException {
Reader content = new StringReader("abc");
SqlCharacterValue value = new SqlCharacterValue(content, 3);
PreparedStatement ps = mock();
value.setTypeValue(ps, 1, Types.NCLOB, null);
verify(ps).setNClob(1, content, 3L);
}
@Test
void withAsciiStream() throws SQLException {
InputStream content = new ByteArrayInputStream("abc".getBytes(StandardCharsets.US_ASCII));
SqlCharacterValue value = new SqlCharacterValue(content, 3);
PreparedStatement ps = mock();
value.setTypeValue(ps, 1, JdbcUtils.TYPE_UNKNOWN, null);
verify(ps).setAsciiStream(1, content, 3L);
}
}
|
SqlCharacterValueTests
|
java
|
apache__spark
|
connector/spark-ganglia-lgpl/src/main/java/com/codahale/metrics/ganglia/GangliaReporter.java
|
{
"start": 1887,
"end": 17288
}
|
class ____ {
private final MetricRegistry registry;
private String prefix;
private int tMax;
private int dMax;
private TimeUnit rateUnit;
private TimeUnit durationUnit;
private MetricFilter filter;
private ScheduledExecutorService executor;
private boolean shutdownExecutorOnStop;
private Set<MetricAttribute> disabledMetricAttributes = Set.of();
private Builder(MetricRegistry registry) {
this.registry = registry;
this.tMax = 60;
this.dMax = 0;
this.rateUnit = TimeUnit.SECONDS;
this.durationUnit = TimeUnit.MILLISECONDS;
this.filter = MetricFilter.ALL;
this.executor = null;
this.shutdownExecutorOnStop = true;
}
/**
* Specifies whether or not, the executor (used for reporting) will be stopped with same time with reporter.
* Default value is true.
* Setting this parameter to false, has the sense in combining with providing external managed executor via {@link #scheduleOn(ScheduledExecutorService)}.
*
* @param shutdownExecutorOnStop if true, then executor will be stopped in same time with this reporter
* @return {@code this}
*/
public Builder shutdownExecutorOnStop(boolean shutdownExecutorOnStop) {
this.shutdownExecutorOnStop = shutdownExecutorOnStop;
return this;
}
/**
* Specifies the executor to use while scheduling reporting of metrics.
* Default value is null.
* Null value leads to executor will be auto created on start.
*
* @param executor the executor to use while scheduling reporting of metrics.
* @return {@code this}
*/
public Builder scheduleOn(ScheduledExecutorService executor) {
this.executor = executor;
return this;
}
/**
* Use the given {@code tmax} value when announcing metrics.
*
* @param tMax the desired gmond {@code tmax} value
* @return {@code this}
*/
public Builder withTMax(int tMax) {
this.tMax = tMax;
return this;
}
/**
* Prefix all metric names with the given string.
*
* @param prefix the prefix for all metric names
* @return {@code this}
*/
public Builder prefixedWith(String prefix) {
this.prefix = prefix;
return this;
}
/**
* Use the given {@code dmax} value when announcing metrics.
*
* @param dMax the desired gmond {@code dmax} value
* @return {@code this}
*/
public Builder withDMax(int dMax) {
this.dMax = dMax;
return this;
}
/**
* Convert rates to the given time unit.
*
* @param rateUnit a unit of time
* @return {@code this}
*/
public Builder convertRatesTo(TimeUnit rateUnit) {
this.rateUnit = rateUnit;
return this;
}
/**
* Convert durations to the given time unit.
*
* @param durationUnit a unit of time
* @return {@code this}
*/
public Builder convertDurationsTo(TimeUnit durationUnit) {
this.durationUnit = durationUnit;
return this;
}
/**
* Only report metrics which match the given filter.
*
* @param filter a {@link MetricFilter}
* @return {@code this}
*/
public Builder filter(MetricFilter filter) {
this.filter = filter;
return this;
}
/**
* Don't report the passed metric attributes for all metrics (e.g. "p999", "stddev" or "m15").
* See {@link MetricAttribute}.
*
* @param disabledMetricAttributes a {@link MetricFilter}
* @return {@code this}
*/
public Builder disabledMetricAttributes(Set<MetricAttribute> disabledMetricAttributes) {
this.disabledMetricAttributes = disabledMetricAttributes;
return this;
}
/**
* Builds a {@link GangliaReporter} with the given properties, announcing metrics to the
* given {@link GMetric} client.
*
* @param gmetric the client to use for announcing metrics
* @return a {@link GangliaReporter}
*/
public GangliaReporter build(GMetric gmetric) {
return new GangliaReporter(registry, gmetric, null, prefix, tMax, dMax, rateUnit, durationUnit, filter,
executor, shutdownExecutorOnStop, disabledMetricAttributes);
}
/**
* Builds a {@link GangliaReporter} with the given properties, announcing metrics to the
* given {@link GMetric} client.
*
* @param gmetrics the clients to use for announcing metrics
* @return a {@link GangliaReporter}
*/
public GangliaReporter build(GMetric... gmetrics) {
return new GangliaReporter(registry, null, gmetrics, prefix, tMax, dMax, rateUnit, durationUnit,
filter, executor, shutdownExecutorOnStop , disabledMetricAttributes);
}
}
private static final SparkLogger LOGGER = SparkLoggerFactory.getLogger(GangliaReporter.class);
private final GMetric gmetric;
private final GMetric[] gmetrics;
private final String prefix;
private final int tMax;
private final int dMax;
private GangliaReporter(MetricRegistry registry,
GMetric gmetric,
GMetric[] gmetrics,
String prefix,
int tMax,
int dMax,
TimeUnit rateUnit,
TimeUnit durationUnit,
MetricFilter filter,
ScheduledExecutorService executor,
boolean shutdownExecutorOnStop,
Set<MetricAttribute> disabledMetricAttributes) {
super(registry, "ganglia-reporter", filter, rateUnit, durationUnit, executor, shutdownExecutorOnStop,
disabledMetricAttributes);
this.gmetric = gmetric;
this.gmetrics = gmetrics;
this.prefix = prefix;
this.tMax = tMax;
this.dMax = dMax;
}
@Override
public void report(SortedMap<String, Gauge> gauges,
SortedMap<String, Counter> counters,
SortedMap<String, Histogram> histograms,
SortedMap<String, Meter> meters,
SortedMap<String, Timer> timers) {
for (Map.Entry<String, Gauge> entry : gauges.entrySet()) {
reportGauge(entry.getKey(), entry.getValue());
}
for (Map.Entry<String, Counter> entry : counters.entrySet()) {
reportCounter(entry.getKey(), entry.getValue());
}
for (Map.Entry<String, Histogram> entry : histograms.entrySet()) {
reportHistogram(entry.getKey(), entry.getValue());
}
for (Map.Entry<String, Meter> entry : meters.entrySet()) {
reportMeter(entry.getKey(), entry.getValue());
}
for (Map.Entry<String, Timer> entry : timers.entrySet()) {
reportTimer(entry.getKey(), entry.getValue());
}
}
private void reportTimer(String name, Timer timer) {
final String sanitizedName = escapeSlashes(name);
final String group = group(name);
try {
final Snapshot snapshot = timer.getSnapshot();
announceIfEnabled(MAX, sanitizedName, group, convertDuration(snapshot.getMax()), getDurationUnit());
announceIfEnabled(MEAN, sanitizedName, group, convertDuration(snapshot.getMean()), getDurationUnit());
announceIfEnabled(MIN, sanitizedName, group, convertDuration(snapshot.getMin()), getDurationUnit());
announceIfEnabled(STDDEV, sanitizedName, group, convertDuration(snapshot.getStdDev()), getDurationUnit());
announceIfEnabled(P50, sanitizedName, group, convertDuration(snapshot.getMedian()), getDurationUnit());
announceIfEnabled(P75, sanitizedName,
group,
convertDuration(snapshot.get75thPercentile()),
getDurationUnit());
announceIfEnabled(P95, sanitizedName,
group,
convertDuration(snapshot.get95thPercentile()),
getDurationUnit());
announceIfEnabled(P98, sanitizedName,
group,
convertDuration(snapshot.get98thPercentile()),
getDurationUnit());
announceIfEnabled(P99, sanitizedName,
group,
convertDuration(snapshot.get99thPercentile()),
getDurationUnit());
announceIfEnabled(P999, sanitizedName,
group,
convertDuration(snapshot.get999thPercentile()),
getDurationUnit());
reportMetered(sanitizedName, timer, group, "calls");
} catch (GangliaException e) {
LOGGER.warn("Unable to report timer {}", e,
MDC.of(LogKeys.METRIC_NAME, sanitizedName));
}
}
private void reportMeter(String name, Meter meter) {
final String sanitizedName = escapeSlashes(name);
final String group = group(name);
try {
reportMetered(sanitizedName, meter, group, "events");
} catch (GangliaException e) {
LOGGER.warn("Unable to report meter {}", e,
MDC.of(LogKeys.METRIC_NAME, name));
}
}
private void reportMetered(String name, Metered meter, String group, String eventName) throws GangliaException {
final String unit = eventName + '/' + getRateUnit();
announceIfEnabled(COUNT, name, group, meter.getCount(), eventName);
announceIfEnabled(M1_RATE, name, group, convertRate(meter.getOneMinuteRate()), unit);
announceIfEnabled(M5_RATE, name, group, convertRate(meter.getFiveMinuteRate()), unit);
announceIfEnabled(M15_RATE, name, group, convertRate(meter.getFifteenMinuteRate()), unit);
announceIfEnabled(MEAN_RATE, name, group, convertRate(meter.getMeanRate()), unit);
}
private void reportHistogram(String name, Histogram histogram) {
final String sanitizedName = escapeSlashes(name);
final String group = group(name);
try {
final Snapshot snapshot = histogram.getSnapshot();
announceIfEnabled(COUNT, sanitizedName, group, histogram.getCount(), "");
announceIfEnabled(MAX, sanitizedName, group, snapshot.getMax(), "");
announceIfEnabled(MEAN, sanitizedName, group, snapshot.getMean(), "");
announceIfEnabled(MIN, sanitizedName, group, snapshot.getMin(), "");
announceIfEnabled(STDDEV, sanitizedName, group, snapshot.getStdDev(), "");
announceIfEnabled(P50, sanitizedName, group, snapshot.getMedian(), "");
announceIfEnabled(P75, sanitizedName, group, snapshot.get75thPercentile(), "");
announceIfEnabled(P95, sanitizedName, group, snapshot.get95thPercentile(), "");
announceIfEnabled(P98, sanitizedName, group, snapshot.get98thPercentile(), "");
announceIfEnabled(P99, sanitizedName, group, snapshot.get99thPercentile(), "");
announceIfEnabled(P999, sanitizedName, group, snapshot.get999thPercentile(), "");
} catch (GangliaException e) {
LOGGER.warn("Unable to report histogram {}", e,
MDC.of(LogKeys.METRIC_NAME, sanitizedName));
}
}
private void reportCounter(String name, Counter counter) {
final String sanitizedName = escapeSlashes(name);
final String group = group(name);
try {
announce(prefix(sanitizedName, COUNT.getCode()), group, Long.toString(counter.getCount()), GMetricType.DOUBLE, "");
} catch (GangliaException e) {
LOGGER.warn("Unable to report counter {}", e,
MDC.of(LogKeys.METRIC_NAME, name));
}
}
private void reportGauge(String name, Gauge gauge) {
final String sanitizedName = escapeSlashes(name);
final String group = group(name);
final Object obj = gauge.getValue();
final String value = String.valueOf(obj);
final GMetricType type = detectType(obj);
try {
announce(name(prefix, sanitizedName), group, value, type, "");
} catch (GangliaException e) {
LOGGER.warn("Unable to report gauge {}", e,
MDC.of(LogKeys.METRIC_NAME, name));
}
}
private static final double MIN_VAL = 1E-300;
private void announceIfEnabled(MetricAttribute metricAttribute, String metricName, String group, double value, String units)
throws GangliaException {
if (getDisabledMetricAttributes().contains(metricAttribute)) {
return;
}
final String string = Math.abs(value) < MIN_VAL ? "0" : Double.toString(value);
announce(prefix(metricName, metricAttribute.getCode()), group, string, GMetricType.DOUBLE, units);
}
private void announceIfEnabled(MetricAttribute metricAttribute, String metricName, String group, long value, String units)
throws GangliaException {
if (getDisabledMetricAttributes().contains(metricAttribute)) {
return;
}
announce(prefix(metricName, metricAttribute.getCode()), group, Long.toString(value), GMetricType.DOUBLE, units);
}
private void announce(String name, String group, String value, GMetricType type, String units)
throws GangliaException {
if (gmetric != null) {
gmetric.announce(name, value, type, units, GMetricSlope.BOTH, tMax, dMax, group);
} else {
for (GMetric gmetric : gmetrics) {
gmetric.announce(name, value, type, units, GMetricSlope.BOTH, tMax, dMax, group);
}
}
}
private GMetricType detectType(Object o) {
if (o instanceof Float) {
return GMetricType.FLOAT;
} else if (o instanceof Double) {
return GMetricType.DOUBLE;
} else if (o instanceof Byte) {
return GMetricType.INT8;
} else if (o instanceof Short) {
return GMetricType.INT16;
} else if (o instanceof Integer) {
return GMetricType.INT32;
} else if (o instanceof Long) {
return GMetricType.DOUBLE;
}
return GMetricType.STRING;
}
private String group(String name) {
final int i = name.lastIndexOf('.');
if (i < 0) {
return "";
}
return name.substring(0, i);
}
private String prefix(String name, String n) {
return name(prefix, name, n);
}
// ganglia metric names can't contain slashes.
private String escapeSlashes(String name) {
return SLASHES.matcher(name).replaceAll("_");
}
}
|
Builder
|
java
|
redisson__redisson
|
redisson-spring-data/redisson-spring-data-32/src/test/java/org/redisson/spring/data/connection/RedissonClusterConnectionTest.java
|
{
"start": 881,
"end": 11319
}
|
class ____ extends BaseTest {
@Test
public void testRandomKey() {
testInCluster(connection -> {
RedissonClient redisson = (RedissonClient) connection.getNativeConnection();
StringRedisTemplate redisTemplate = new StringRedisTemplate();
redisTemplate.setConnectionFactory(new RedissonConnectionFactory(redisson));
redisTemplate.afterPropertiesSet();
for (int i = 0; i < 10; i++) {
redisTemplate.opsForValue().set("i" + i, "i" + i);
}
for (RedisClusterNode clusterNode : redisTemplate.getConnectionFactory().getClusterConnection().clusterGetNodes()) {
String key = redisTemplate.opsForCluster().randomKey(clusterNode);
assertThat(key).isNotNull();
}
});
}
@Test
public void testDel() {
testInCluster(connection -> {
List<byte[]> keys = new ArrayList<>();
for (int i = 0; i < 10; i++) {
byte[] key = ("test" + i).getBytes();
keys.add(key);
connection.set(key, ("test" + i).getBytes());
}
assertThat(connection.del(keys.toArray(new byte[0][]))).isEqualTo(10);
});
}
@Test
public void testScan() {
testInCluster(connection -> {
Map<byte[], byte[]> map = new HashMap<>();
for (int i = 0; i < 10000; i++) {
map.put(RandomString.make(32).getBytes(), RandomString.make(32).getBytes(StandardCharsets.UTF_8));
}
connection.mSet(map);
Cursor<byte[]> b = connection.scan(ScanOptions.scanOptions().build());
Set<String> sett = new HashSet<>();
int counter = 0;
while (b.hasNext()) {
byte[] tt = b.next();
sett.add(new String(tt));
counter++;
}
assertThat(sett.size()).isEqualTo(map.size());
assertThat(counter).isEqualTo(map.size());
});
}
@Test
public void testMSet() {
testInCluster(connection -> {
Map<byte[], byte[]> map = new HashMap<>();
for (int i = 0; i < 10; i++) {
map.put(("test" + i).getBytes(), ("test" + i*100).getBytes());
}
connection.mSet(map);
for (Map.Entry<byte[], byte[]> entry : map.entrySet()) {
assertThat(connection.get(entry.getKey())).isEqualTo(entry.getValue());
}
});
}
@Test
public void testMGet() {
testInCluster(connection -> {
Map<byte[], byte[]> map = new HashMap<>();
for (int i = 0; i < 10; i++) {
map.put(("test" + i).getBytes(), ("test" + i*100).getBytes());
}
connection.mSet(map);
List<byte[]> r = connection.mGet(map.keySet().toArray(new byte[0][]));
assertThat(r).containsExactly(map.values().toArray(new byte[0][]));
});
}
@Test
public void testClusterGetNodes() {
testInCluster(connection -> {
Iterable<RedisClusterNode> nodes = connection.clusterGetNodes();
assertThat(nodes).hasSize(6);
for (RedisClusterNode redisClusterNode : nodes) {
assertThat(redisClusterNode.getLinkState()).isNotNull();
assertThat(redisClusterNode.getFlags()).isNotEmpty();
assertThat(redisClusterNode.getHost()).isNotNull();
assertThat(redisClusterNode.getPort()).isNotNull();
assertThat(redisClusterNode.getId()).isNotNull();
assertThat(redisClusterNode.getType()).isNotNull();
if (redisClusterNode.getType() == NodeType.MASTER) {
assertThat(redisClusterNode.getSlotRange().getSlots()).isNotEmpty();
} else {
assertThat(redisClusterNode.getMasterId()).isNotNull();
}
}
});
}
@Test
public void testClusterGetNodesMaster() {
testInCluster(connection -> {
Iterable<RedisClusterNode> nodes = connection.clusterGetNodes();
for (RedisClusterNode redisClusterNode : nodes) {
if (redisClusterNode.getType() == NodeType.MASTER) {
Collection<RedisClusterNode> slaves = connection.clusterGetReplicas(redisClusterNode);
assertThat(slaves).hasSize(1);
}
}
});
}
@Test
public void testClusterGetMasterSlaveMap() {
testInCluster(connection -> {
Map<RedisClusterNode, Collection<RedisClusterNode>> map = connection.clusterGetMasterReplicaMap();
assertThat(map).hasSize(3);
for (Collection<RedisClusterNode> slaves : map.values()) {
assertThat(slaves).hasSize(1);
}
});
}
@Test
public void testClusterGetSlotForKey() {
testInCluster(connection -> {
Integer slot = connection.clusterGetSlotForKey("123".getBytes());
assertThat(slot).isNotNull();
});
}
@Test
public void testClusterGetNodeForSlot() {
testInCluster(connection -> {
RedisClusterNode node1 = connection.clusterGetNodeForSlot(1);
RedisClusterNode node2 = connection.clusterGetNodeForSlot(16000);
assertThat(node1.getId()).isNotEqualTo(node2.getId());
});
}
@Test
public void testClusterGetNodeForKey() {
testInCluster(connection -> {
RedisClusterNode node = connection.clusterGetNodeForKey("123".getBytes());
assertThat(node).isNotNull();
});
}
@Test
public void testClusterGetClusterInfo() {
testInCluster(connection -> {
ClusterInfo info = connection.clusterGetClusterInfo();
assertThat(info.getSlotsFail()).isEqualTo(0);
assertThat(info.getSlotsOk()).isEqualTo(MasterSlaveConnectionManager.MAX_SLOT);
assertThat(info.getSlotsAssigned()).isEqualTo(MasterSlaveConnectionManager.MAX_SLOT);
});
}
@Test
public void testClusterAddRemoveSlots() {
testInCluster(connection -> {
RedisClusterNode master = getFirstMaster(connection);
Integer slot = master.getSlotRange().getSlots().iterator().next();
connection.clusterDeleteSlots(master, slot);
connection.clusterAddSlots(master, slot);
});
}
@Test
public void testClusterCountKeysInSlot() {
testInCluster(connection -> {
Long t = connection.clusterCountKeysInSlot(1);
assertThat(t).isZero();
});
}
@Test
public void testClusterGetKeysInSlot() {
testInCluster(connection -> {
connection.flushAll();
List<byte[]> keys = connection.clusterGetKeysInSlot(12, 10);
assertThat(keys).isEmpty();
});
}
@Test
public void testClusterPing() {
testInCluster(connection -> {
RedisClusterNode master = getFirstMaster(connection);
String res = connection.ping(master);
assertThat(res).isEqualTo("PONG");
});
}
@Test
public void testDbSize() {
testInCluster(connection -> {
connection.flushAll();
RedisClusterNode master = getFirstMaster(connection);
Long size = connection.dbSize(master);
assertThat(size).isZero();
});
}
@Test
public void testInfo() {
testInCluster(connection -> {
RedisClusterNode master = getFirstMaster(connection);
Properties info = connection.info(master);
assertThat(info.size()).isGreaterThan(10);
});
}
@Test
public void testDelPipeline() {
testInCluster(connection -> {
byte[] k = "key".getBytes();
byte[] v = "val".getBytes();
connection.set(k, v);
connection.openPipeline();
connection.get(k);
connection.del(k);
List<Object> results = connection.closePipeline();
byte[] val = (byte[])results.get(0);
assertThat(val).isEqualTo(v);
Long res = (Long) results.get(1);
assertThat(res).isEqualTo(1);
});
}
@Test
public void testResetConfigStats() {
testInCluster(connection -> {
RedisClusterNode master = getFirstMaster(connection);
connection.resetConfigStats(master);
});
}
@Test
public void testTime() {
testInCluster(connection -> {
RedisClusterNode master = getFirstMaster(connection);
Long time = connection.time(master);
assertThat(time).isGreaterThan(1000);
});
}
@Test
public void testGetClientList() {
testInCluster(connection -> {
RedisClusterNode master = getFirstMaster(connection);
List<RedisClientInfo> list = connection.getClientList(master);
assertThat(list.size()).isGreaterThan(10);
});
}
@Test
public void testSetConfig() {
testInCluster(connection -> {
RedisClusterNode master = getFirstMaster(connection);
connection.setConfig(master, "timeout", "10");
});
}
@Test
public void testGetConfig() {
testInCluster(connection -> {
RedisClusterNode master = getFirstMaster(connection);
Properties config = connection.getConfig(master, "*");
assertThat(config.size()).isGreaterThan(20);
});
}
protected RedisClusterNode getFirstMaster(RedissonClusterConnection connection) {
Map<RedisClusterNode, Collection<RedisClusterNode>> map = connection.clusterGetMasterReplicaMap();
RedisClusterNode master = map.keySet().iterator().next();
return master;
}
@Test
public void testConnectionFactoryReturnsClusterConnection() {
testInCluster(connection -> {
RedissonClient redisson = (RedissonClient) connection.getNativeConnection();
RedisConnectionFactory connectionFactory = new RedissonConnectionFactory(redisson);
assertThat(connectionFactory.getConnection()).isInstanceOf(RedissonClusterConnection.class);
});
}
}
|
RedissonClusterConnectionTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/type/descriptor/java/EnumJavaType.java
|
{
"start": 6745,
"end": 8753
}
|
enum ____
*/
public T fromName(String relationalForm) {
return relationalForm == null ? null : Enum.valueOf( getJavaTypeClass(), relationalForm.trim() );
}
@Override
public String getCheckCondition(String columnName, JdbcType jdbcType, BasicValueConverter<T, ?> converter, Dialect dialect) {
if ( converter != null
&& jdbcType.getDefaultSqlTypeCode() != NAMED_ENUM ) {
return renderConvertedEnumCheckConstraint( columnName, jdbcType, converter, dialect );
}
else if ( jdbcType.isInteger() ) {
int max = getJavaTypeClass().getEnumConstants().length - 1;
return dialect.getCheckCondition( columnName, 0, max );
}
else if ( jdbcType.isString() ) {
return dialect.getCheckCondition( columnName, getJavaTypeClass() );
}
else {
return null;
}
}
private String renderConvertedEnumCheckConstraint(
String columnName,
JdbcType jdbcType,
BasicValueConverter<T, ?> converter,
Dialect dialect) {
final Set<?> valueSet = valueSet( jdbcType, converter );
return valueSet == null ? null : dialect.getCheckCondition( columnName, valueSet, jdbcType );
}
private <R> Set<R> valueSet(JdbcType jdbcType, BasicValueConverter<T,R> converter) {
// for `@EnumeratedValue` we already have the possible values...
if ( converter instanceof EnumeratedValueConverter<T,R> enumeratedValueConverter ) {
return enumeratedValueConverter.getRelationalValueSet();
}
else {
if ( !SqlTypes.isIntegral( jdbcType.getJdbcTypeCode() )
&& !SqlTypes.isCharacterType( jdbcType.getJdbcTypeCode() ) ) {
// we only support adding check constraints for generalized conversions to
// INTEGER, SMALLINT, TINYINT, (N)CHAR, (N)VARCHAR, LONG(N)VARCHAR
return null;
}
else {
final T[] enumConstants = getJavaTypeClass().getEnumConstants();
final Set<R> valueSet = setOfSize( enumConstants.length );
for ( T enumConstant : enumConstants ) {
valueSet.add( converter.toRelationalValue( enumConstant ) );
}
return valueSet;
}
}
}
}
|
type
|
java
|
micronaut-projects__micronaut-core
|
core-processor/src/main/java/io/micronaut/inject/beans/visitor/BeanIntrospectionWriter.java
|
{
"start": 12307,
"end": 24243
}
|
class ____ the introspection will write.
*/
public String getIntrospectionName() {
return introspectionName;
}
/**
* @return The constructor.
*/
@Nullable
public MethodElement getConstructor() {
return constructor;
}
/**
* The bean type.
*
* @return The bean type
*/
public ClassTypeDef getBeanType() {
return beanType;
}
/**
* Visit a property.
*
* @param type The property type
* @param genericType The generic type
* @param name The property name
* @param readMember The read method
* @param readType The read type
* @param writeMember The write member
* @param writeType The write type
* @param isReadOnly Is read only
*/
void visitProperty(
@NonNull ClassElement type,
@NonNull ClassElement genericType,
@NonNull String name,
@Nullable MemberElement readMember,
@Nullable MemberElement writeMember,
@Nullable ClassElement readType,
@Nullable ClassElement writeType,
boolean isReadOnly) {
this.evaluatedExpressionProcessor.processEvaluatedExpressions(genericType.getAnnotationMetadata(), beanClassElement);
int readDispatchIndex = -1;
if (readMember != null) {
if (readMember instanceof MethodElement element) {
readDispatchIndex = dispatchWriter.addMethod(beanClassElement, element, true);
} else if (readMember instanceof FieldElement element) {
readDispatchIndex = dispatchWriter.addGetField(element);
} else {
throw new IllegalStateException();
}
}
int writeDispatchIndex = -1;
int withMethodIndex = -1;
if (writeMember != null) {
if (writeMember instanceof MethodElement element) {
writeDispatchIndex = dispatchWriter.addMethod(beanClassElement, element, true);
} else if (writeMember instanceof FieldElement element) {
writeDispatchIndex = dispatchWriter.addSetField(element);
} else {
throw new IllegalStateException();
}
}
boolean isMutable = !isReadOnly || hasAssociatedConstructorArgument(name, genericType);
if (isMutable) {
if (writeMember == null) {
final String prefix = this.annotationMetadata.stringValue(Introspected.class, "withPrefix").orElse("with");
ElementQuery<MethodElement> elementQuery = ElementQuery.of(MethodElement.class)
.onlyAccessible()
.onlyDeclared()
.onlyInstance()
.filter((methodElement -> {
ParameterElement[] parameters = methodElement.getParameters();
String methodName = methodElement.getName();
return methodName.startsWith(prefix) && methodName.equals(prefix + NameUtils.capitalize(name))
&& parameters.length == 1
&& methodElement.getGenericReturnType().getName().equals(beanClassElement.getName())
&& type.getType().isAssignable(parameters[0].getType());
}));
MethodElement withMethod = beanClassElement.getEnclosedElement(elementQuery).orElse(null);
if (withMethod != null) {
withMethodIndex = dispatchWriter.addMethod(beanClassElement, withMethod, true);
} else {
MethodElement constructor = this.constructor == null ? defaultConstructor : this.constructor;
if (constructor != null) {
if (copyConstructorDispatchTarget == null) {
copyConstructorDispatchTarget = new CopyConstructorDispatchTarget(beanType, beanProperties, dispatchWriter, constructor);
}
copyConstructorDispatchTarget.propertyNames.put(name, dispatchWriter.getDispatchTargets().size());
withMethodIndex = dispatchWriter.addDispatchTarget(copyConstructorDispatchTarget);
}
}
}
// Otherwise, set method would be used in BeanProperty
} else {
withMethodIndex = dispatchWriter.addDispatchTarget(new ExceptionDispatchTarget(
UnsupportedOperationException.class,
"Cannot mutate property [" + name + "] that is not mutable via a setter method, field or constructor argument for type: " + beanType.getName()
));
}
beanProperties.add(new BeanPropertyData(
name,
genericType,
readType,
writeType,
readDispatchIndex,
writeDispatchIndex,
withMethodIndex,
isReadOnly
));
}
/**
* Visits a bean method.
*
* @param element The method
*/
public void visitBeanMethod(MethodElement element) {
if (element != null && !element.isPrivate()) {
int dispatchIndex = dispatchWriter.addMethod(beanClassElement, element);
beanMethods.add(new BeanMethodData(element, dispatchIndex));
this.evaluatedExpressionProcessor.processEvaluatedExpressions(element.getAnnotationMetadata(), beanClassElement);
for (ParameterElement parameter : element.getParameters()) {
this.evaluatedExpressionProcessor.processEvaluatedExpressions(parameter.getAnnotationMetadata(), beanClassElement);
}
}
}
/**
* Builds an index for the given property and annotation.
*
* @param annotationName The annotation
* @param property The property
* @param value the value of the annotation
*/
void indexProperty(String annotationName, String property, @Nullable String value) {
indexByAnnotationAndValue.put(new AnnotationWithValue(annotationName, value), property);
indexByAnnotations.computeIfAbsent(annotationName, (a) -> new LinkedHashSet<>()).add(property);
}
/**
* Finish writing the introspection.
*/
public void finish() {
// Generate the bytecode in the round it's being invoked
output = generateIntrospectionClass();
evaluatedExpressionProcessor.finish();
}
@Override
public void accept(ClassWriterOutputVisitor classWriterOutputVisitor) throws IOException {
if (output != null) {
classWriterOutputVisitor.visitServiceDescriptor(BeanIntrospectionReference.class, introspectionName, beanClassElement);
try (OutputStream outputStream = classWriterOutputVisitor.visitClass(introspectionName, getOriginatingElements())) {
outputStream.write(output);
}
output = null;
this.evaluatedExpressionProcessor.writeEvaluatedExpressions(classWriterOutputVisitor);
}
}
private ExpressionDef pushBeanPropertyReference(BeanPropertyData beanPropertyData,
List<StatementDef> statements,
Function<String, ExpressionDef> loadClassValueExpressionFn) {
ClassTypeDef beanPropertyRefDef = ClassTypeDef.of(AbstractInitializableBeanIntrospection.BeanPropertyRef.class);
boolean mutable = !beanPropertyData.isReadOnly || hasAssociatedConstructorArgument(beanPropertyData.name, beanPropertyData.type);
StatementDef.DefineAndAssign defineAndAssign = ArgumentExpUtils.pushCreateArgument(
annotationMetadata,
beanClassElement,
introspectionTypeDef,
beanPropertyData.name,
beanPropertyData.type,
loadClassValueExpressionFn
).newLocal(beanPropertyData.name + "Arg");
statements.add(defineAndAssign);
VariableDef mainArgument = defineAndAssign.variable();
ExpressionDef readArgument = null;
ExpressionDef writeArgument = null;
if (beanPropertyData.type.equals(beanPropertyData.readType) && beanPropertyData.type.equals(beanPropertyData.writeType)) {
readArgument = mainArgument;
writeArgument = mainArgument;
} else if (beanPropertyData.type.equals(beanPropertyData.readType) && beanPropertyData.writeType == null) {
readArgument = mainArgument;
} else if (beanPropertyData.type.equals(beanPropertyData.writeType) && beanPropertyData.readType == null) {
writeArgument = mainArgument;
} else {
readArgument = beanPropertyData.readType == null ? null : ArgumentExpUtils.pushCreateArgument(
annotationMetadata,
beanClassElement,
introspectionTypeDef,
beanPropertyData.name,
beanPropertyData.readType,
loadClassValueExpressionFn
);
writeArgument = beanPropertyData.writeType == null ? null : ArgumentExpUtils.pushCreateArgument(
annotationMetadata,
beanClassElement,
introspectionTypeDef,
beanPropertyData.name,
beanPropertyData.writeType,
loadClassValueExpressionFn
);
}
return beanPropertyRefDef.instantiate(
BEAN_PROPERTY_REF_CONSTRUCTOR,
mainArgument,
readArgument == null ? ExpressionDef.nullValue() : readArgument,
writeArgument == null ? ExpressionDef.nullValue() : writeArgument,
ExpressionDef.constant(beanPropertyData.getDispatchIndex),
ExpressionDef.constant(beanPropertyData.setDispatchIndex),
ExpressionDef.constant(beanPropertyData.withMethodDispatchIndex),
ExpressionDef.constant(beanPropertyData.isReadOnly),
ExpressionDef.constant(mutable)
);
}
private ExpressionDef newBeanMethodRef(BeanMethodData beanMethodData, Function<String, ExpressionDef> loadClassValueExpressionFn) {
return ClassTypeDef.of(AbstractInitializableBeanIntrospection.BeanMethodRef.class)
.instantiate(
BEAN_METHOD_REF_CONSTRUCTOR,
// 1: return argument
ArgumentExpUtils.pushReturnTypeArgument(
annotationMetadata,
introspectionTypeDef,
beanMethodData.methodElement.getOwningType(),
beanMethodData.methodElement.getGenericReturnType(),
loadClassValueExpressionFn),
// 2: name
ExpressionDef.constant(beanMethodData.methodElement.getName()),
// 3: annotation metadata
getAnnotationMetadataExpression(beanMethodData.methodElement.getAnnotationMetadata(), loadClassValueExpressionFn),
// 4: arguments
beanMethodData.methodElement.getParameters().length == 0 ? ExpressionDef.nullValue() : ArgumentExpUtils.pushBuildArgumentsForMethod(
annotationMetadata,
beanClassElement,
introspectionTypeDef,
Arrays.asList(beanMethodData.methodElement.getParameters()),
loadClassValueExpressionFn
),
// 5: method index
ExpressionDef.constant(beanMethodData.dispatchIndex)
);
}
private ExpressionDef newEnumConstantRef(EnumConstantElement enumConstantElement, Function<String, ExpressionDef> loadClassValueExpressionFn) {
return ClassTypeDef.of(
AbstractEnumBeanIntrospectionAndReference.EnumConstantDynamicRef.class
).instantiate(
ENUM_CONSTANT_DYNAMIC_REF_CONSTRUCTOR,
// 1: push annotation
|
that
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelineDelegationTokenResponse.java
|
{
"start": 1355,
"end": 1806
}
|
class ____ {
private String type;
private Object content;
public TimelineDelegationTokenResponse() {
}
@XmlElement(name = "type")
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
@XmlElement(name = "content")
public Object getContent() {
return content;
}
public void setContent(Object content) {
this.content = content;
}
}
|
TimelineDelegationTokenResponse
|
java
|
google__guava
|
android/guava/src/com/google/common/io/LittleEndianDataOutputStream.java
|
{
"start": 1159,
"end": 1416
}
|
class ____ violates the specification of its supertype {@code
* DataOutput}, which explicitly requires big-endian byte order.
*
* @author Chris Nokleberg
* @author Keith Bottner
* @since 8.0
*/
@J2ktIncompatible
@GwtIncompatible
public final
|
intentionally
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/components/dynamic/AuditedDynamicComponentTest.java
|
{
"start": 1529,
"end": 8495
}
|
class ____ {
//@Test
public void testAuditedDynamicComponentFailure(EntityManagerFactoryScope scope) throws URISyntaxException {
final Configuration config = new Configuration();
final URL hbm = Thread.currentThread().getContextClassLoader().getResource(
"mappings/dynamicComponents/mapAudited.hbm.xml"
);
config.addFile( new File( hbm.toURI() ) );
scope.inEntityManager( em -> {
final var auditStrategyClass = getAuditStrategy( em ).getClass();
if ( auditStrategyClass != DefaultAuditStrategy.class ) {
config.setProperty( EnversSettings.AUDIT_STRATEGY, auditStrategyClass.getName() );
}
} );
final ServiceRegistry serviceRegistry = ServiceRegistryBuilder.buildServiceRegistry( config.getProperties() );
try {
config.buildSessionFactory( serviceRegistry ).close();
fail( "MappingException expected" );
}
catch ( MappingException e ) {
assertEquals(
"Audited dynamic-component properties are not supported. Consider applying @NotAudited annotation to "
+ AuditedDynamicComponentEntity.class.getName() + "#customFields.",
e.getMessage()
);
}
finally {
ServiceRegistryBuilder.destroy( serviceRegistry );
}
}
@BeforeClassTemplate
public void initData(EntityManagerFactoryScope scope) {
scope.inTransaction( em -> {
SimpleEntity simpleEntity = new SimpleEntity( 1L, "Very simple entity" );
em.persist( simpleEntity );
} );
// Revision 2
scope.inTransaction( em -> {
SimpleEntity simpleEntity = em.find( SimpleEntity.class, 1L );
AuditedDynamicComponentEntity entity = new AuditedDynamicComponentEntity( 1L, "static field value" );
entity.getCustomFields().put( "prop1", 13 );
entity.getCustomFields().put( "prop2", 0.1f );
entity.getCustomFields().put( "prop3", simpleEntity );
entity.getCustomFields().put( "prop4", true );
em.persist( entity );
} );
// revision 3
scope.inTransaction( em -> {
SimpleEntity simpleEntity2 = new SimpleEntity( 2L, "Not so simple entity" );
em.persist( simpleEntity2 );
AuditedDynamicComponentEntity entity = em.find( AuditedDynamicComponentEntity.class, 1L );
entity.getCustomFields().put( "prop3", simpleEntity2 );
em.merge( entity );
} );
// Revision 4
scope.inTransaction( em -> {
AuditedDynamicComponentEntity entity = em.find( AuditedDynamicComponentEntity.class, 1L );
entity.getCustomFields().put( "prop1", 2 );
entity.getCustomFields().put( "prop4", false );
em.merge( entity );
} );
// Revision 5
scope.inTransaction( em -> {
AuditedDynamicComponentEntity entity = em.getReference( AuditedDynamicComponentEntity.class, 1L );
entity.getCustomFields().remove( "prop2" );
em.merge( entity );
} );
// Revision 6
scope.inTransaction( em -> {
AuditedDynamicComponentEntity entity = em.getReference( AuditedDynamicComponentEntity.class, 1L );
entity.getCustomFields().clear();
em.merge( entity );
} );
// Revision 7
scope.inTransaction( em -> {
AuditedDynamicComponentEntity entity = em.getReference( AuditedDynamicComponentEntity.class, 1L );
em.remove( entity );
} );
}
@Test
public void testRevisionsCounts(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
assertEquals(
Arrays.asList( 2, 3, 4, 5, 6, 7 ),
AuditReaderFactory.get( em ).getRevisions( AuditedDynamicComponentEntity.class, 1L )
);
} );
}
@Test
public void testHistoryOfId1(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
// Revision 2
AuditedDynamicComponentEntity entity = new AuditedDynamicComponentEntity( 1L, "static field value" );
entity.getCustomFields().put( "prop1", 13 );
entity.getCustomFields().put( "prop2", 0.1f );
entity.getCustomFields().put( "prop3", new SimpleEntity( 1L, "Very simple entity" ) );
entity.getCustomFields().put( "prop4", true );
AuditedDynamicComponentEntity ver2 = auditReader.find(
AuditedDynamicComponentEntity.class,
entity.getId(),
2
);
assertEquals( entity, ver2 );
// Revision 3
SimpleEntity simpleEntity2 = new SimpleEntity( 2L, "Not so simple entity" );
entity.getCustomFields().put( "prop3", simpleEntity2 );
AuditedDynamicComponentEntity ver3 = auditReader.find(
AuditedDynamicComponentEntity.class,
entity.getId(),
3
);
assertEquals( entity, ver3 );
// Revision 4
entity.getCustomFields().put( "prop1", 2 );
entity.getCustomFields().put( "prop4", false );
AuditedDynamicComponentEntity ver4 = auditReader.find(
AuditedDynamicComponentEntity.class,
entity.getId(),
4
);
assertEquals( entity, ver4 );
// Revision 5
entity.getCustomFields().put( "prop2", null );
AuditedDynamicComponentEntity ver5 = auditReader.find(
AuditedDynamicComponentEntity.class,
entity.getId(),
5
);
assertEquals( entity, ver5 );
// Revision 6
entity.getCustomFields().put( "prop1", null );
entity.getCustomFields().put( "prop2", null );
entity.getCustomFields().put( "prop3", null );
entity.getCustomFields().put( "prop4", null );
AuditedDynamicComponentEntity ver6 = auditReader.find(
AuditedDynamicComponentEntity.class,
entity.getId(),
6
);
assertEquals( entity, ver6 );
} );
}
@Test
public void testOfQueryOnDynamicComponent(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
//given (and result of initData()
AuditedDynamicComponentEntity entity = new AuditedDynamicComponentEntity( 1L, "static field value" );
entity.getCustomFields().put( "prop1", 13 );
entity.getCustomFields().put( "prop2", 0.1f );
entity.getCustomFields().put( "prop3", new SimpleEntity( 1L, "Very simple entity" ) );
entity.getCustomFields().put( "prop4", true );
//when
List resultList = auditReader.createQuery()
.forEntitiesAtRevision( AuditedDynamicComponentEntity.class, 2 )
.add( AuditEntity.property( "customFields_prop1" ).le( 20 ) )
.getResultList();
//then
assertEquals( entity, resultList.get( 0 ) );
//when
resultList = auditReader.createQuery()
.forEntitiesAtRevision( AuditedDynamicComponentEntity.class, 2 )
.add( AuditEntity.property( "customFields_prop3" ).eq( new SimpleEntity( 1L, "Very simple entity" ) ) )
.getResultList();
//then
AuditedDynamicComponentEntity entity2 = (AuditedDynamicComponentEntity) auditReader.createQuery()
.forEntitiesAtRevision( AuditedDynamicComponentEntity.class, 4 )
.getResultList().get( 0 );
entity2.getCustomFields().put( "prop2", null );
resultList = auditReader.createQuery()
.forEntitiesAtRevision( AuditedDynamicComponentEntity.class, 5 )
.add( AuditEntity.property( "customFields_prop2" ).isNull() )
.getResultList();
//then
assertEquals( entity2, resultList.get( 0 ) );
} );
}
}
|
AuditedDynamicComponentTest
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/api/reactive/BaseRedisReactiveCommands.java
|
{
"start": 1301,
"end": 6306
}
|
interface ____<K, V> {
/**
* Post a message to a channel.
*
* @param channel the channel type: key.
* @param message the message type: value.
* @return Long integer-reply the number of clients that received the message.
*/
Mono<Long> publish(K channel, V message);
/**
* Lists the currently *active channels*.
*
* @return K array-reply a list of active channels, optionally matching the specified pattern.
*/
Flux<K> pubsubChannels();
/**
* Lists the currently *active channels*.
*
* @param channel the key.
* @return K array-reply a list of active channels, optionally matching the specified pattern.
*/
Flux<K> pubsubChannels(K channel);
/**
* Returns the number of subscribers (not counting clients subscribed to patterns) for the specified channels.
*
* @param channels channel keys.
* @return array-reply a list of channels and number of subscribers for every channel.
*/
Mono<Map<K, Long>> pubsubNumsub(K... channels);
/**
* Lists the currently *active shard channels*.
*
* @return K array-reply a list of active channels.
*/
Flux<K> pubsubShardChannels();
/**
* Lists the currently *active shard channels*.
*
* @param pattern the pattern type: patternkey (pattern).
* @return K array-reply a list of active channels, optionally matching the specified pattern.
*/
Flux<K> pubsubShardChannels(K pattern);
/**
* Returns the number of subscribers (not counting clients subscribed to patterns) for the specified shard channels.
*
* @param shardChannels channel keys.
* @return array-reply a list of channels and number of subscribers for every channel.
* @since 6.4
*/
Mono<Map<K, Long>> pubsubShardNumsub(K... shardChannels);
/**
* Returns the number of subscriptions to patterns.
*
* @return Long integer-reply the number of patterns all the clients are subscribed to.
*/
Mono<Long> pubsubNumpat();
/**
* Post a message to a shard channel.
*
* @param shardChannel the shard channel type: key.
* @param message the message type: value.
* @return Long integer-reply the number of clients that received the message.
* @since 6.4
*/
Mono<Long> spublish(K shardChannel, V message);
/**
* Echo the given string.
*
* @param msg the message type: value.
* @return V bulk-string-reply.
*/
Mono<V> echo(V msg);
/**
* Return the role of the instance in the context of replication.
*
* @return Object array-reply where the first element is one of master, slave, sentinel and the additional elements are
* role-specific.
*/
Flux<Object> role();
/**
* Ping the server.
*
* @return String simple-string-reply.
*/
Mono<String> ping();
/**
* Switch connection to Read-Only mode when connecting to a cluster.
*
* @return String simple-string-reply.
*/
Mono<String> readOnly();
/**
* Switch connection to Read-Write mode (default) when connecting to a cluster.
*
* @return String simple-string-reply.
*/
Mono<String> readWrite();
/**
* Instructs Redis to disconnect the connection. Note that if auto-reconnect is enabled then Lettuce will auto-reconnect if
* the connection was disconnected. Use {@link io.lettuce.core.api.StatefulConnection#close} to close connections and
* release resources.
*
* @return String simple-string-reply always OK.
*/
Mono<String> quit();
/**
* Wait for replication.
*
* @param replicas minimum number of replicas.
* @param timeout timeout in milliseconds.
* @return number of replicas.
*/
Mono<Long> waitForReplication(int replicas, long timeout);
/**
* Dispatch a command to the Redis Server. Please note the command output type must fit to the command response.
*
* @param type the command, must not be {@code null}.
* @param output the command output, must not be {@code null}.
* @param <T> response type.
* @return the command response.
*/
<T> Flux<T> dispatch(ProtocolKeyword type, CommandOutput<K, V, ?> output);
/**
* Dispatch a command to the Redis Server. Please note the command output type must fit to the command response.
*
* @param type the command, must not be {@code null}.
* @param output the command output, must not be {@code null}.
* @param args the command arguments, must not be {@code null}.
* @param <T> response type.
* @return the command response.
*/
<T> Flux<T> dispatch(ProtocolKeyword type, CommandOutput<K, V, ?> output, CommandArgs<K, V> args);
/**
* @return the currently configured instance of the {@link JsonParser}
* @since 6.5
*/
JsonParser getJsonParser();
}
|
BaseRedisReactiveCommands
|
java
|
apache__dubbo
|
dubbo-spring-boot-project/dubbo-spring-boot/src/main/java/org/apache/dubbo/spring/boot/interceptor/DubboTagCookieInterceptor.java
|
{
"start": 1147,
"end": 2173
}
|
class ____ implements HandlerInterceptor {
@Override
public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler)
throws Exception {
String tag = getSingleCookieValue(request.getCookies(), CommonConstants.TAG_KEY);
RpcContext.getClientAttachment().setAttachment(CommonConstants.TAG_KEY, tag);
return true;
}
@Override
public void afterCompletion(HttpServletRequest request, HttpServletResponse response, Object handler, Exception ex)
throws Exception {
RpcContext.getClientAttachment().removeAttachment(CommonConstants.TAG_KEY);
}
private static String getSingleCookieValue(Cookie[] cookies, String name) {
if (cookies == null || cookies.length == 0) {
return null;
}
for (Cookie cookie : cookies) {
if (name.equals(cookie.getName())) {
return cookie.getValue();
}
}
return null;
}
}
|
DubboTagCookieInterceptor
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestEditLogRace.java
|
{
"start": 3424,
"end": 3564
}
|
class ____ various synchronization bugs in FSEditLog rolling
* and namespace saving.
*/
@MethodSource("data")
@ParameterizedClass
public
|
tests
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/voyageai/VoyageAIResponseHandlerTests.java
|
{
"start": 1200,
"end": 6943
}
|
class ____ extends ESTestCase {
public void testCheckForFailureStatusCode_DoesNotThrowForStatusCodesBetween200And299() {
callCheckForFailureStatusCode(randomIntBetween(200, 299), "id");
}
public void testCheckForFailureStatusCode_ThrowsFor503() {
var exception = expectThrows(RetryException.class, () -> callCheckForFailureStatusCode(503, "id"));
assertFalse(exception.shouldRetry());
MatcherAssert.assertThat(
exception.getCause().getMessage(),
containsString("Received a server error status code for request from inference entity id [id] status [503]")
);
MatcherAssert.assertThat(((ElasticsearchStatusException) exception.getCause()).status(), is(RestStatus.BAD_REQUEST));
}
public void testCheckForFailureStatusCode_ThrowsFor500_WithShouldRetryTrue() {
var exception = expectThrows(RetryException.class, () -> callCheckForFailureStatusCode(500, "id"));
assertTrue(exception.shouldRetry());
MatcherAssert.assertThat(
exception.getCause().getMessage(),
containsString("Received a server error status code for request from inference entity id [id] status [500]")
);
MatcherAssert.assertThat(((ElasticsearchStatusException) exception.getCause()).status(), is(RestStatus.BAD_REQUEST));
}
public void testCheckForFailureStatusCode_ThrowsFor429_WithShouldRetryTrue() {
var exception = expectThrows(RetryException.class, () -> callCheckForFailureStatusCode(429, "id"));
assertTrue(exception.shouldRetry());
MatcherAssert.assertThat(
exception.getCause().getMessage(),
containsString("Received a rate limit status code for request from inference entity id [id] status [429]")
);
MatcherAssert.assertThat(((ElasticsearchStatusException) exception.getCause()).status(), is(RestStatus.TOO_MANY_REQUESTS));
}
public void testCheckForFailureStatusCode_ThrowsFor400() {
var exception = expectThrows(RetryException.class, () -> callCheckForFailureStatusCode(400, "id"));
assertFalse(exception.shouldRetry());
MatcherAssert.assertThat(
exception.getCause().getMessage(),
containsString("Received an input validation error response for request from inference entity id [id] status [400]")
);
MatcherAssert.assertThat(((ElasticsearchStatusException) exception.getCause()).status(), is(RestStatus.BAD_REQUEST));
}
public void testCheckForFailureStatusCode_ThrowsFor400_InputsTooLarge() {
var exception = expectThrows(
RetryException.class,
() -> callCheckForFailureStatusCode(400, "\"input\" length 2049 is larger than the largest allowed size 2048", "id")
);
assertFalse(exception.shouldRetry());
MatcherAssert.assertThat(
exception.getCause().getMessage(),
containsString("Received an input validation error response for request from inference entity id [id] status [400]")
);
MatcherAssert.assertThat(((ElasticsearchStatusException) exception.getCause()).status(), is(RestStatus.BAD_REQUEST));
}
public void testCheckForFailureStatusCode_ThrowsFor401() {
var exception = expectThrows(RetryException.class, () -> callCheckForFailureStatusCode(401, "inferenceEntityId"));
assertFalse(exception.shouldRetry());
MatcherAssert.assertThat(
exception.getCause().getMessage(),
containsString(
"Received an authentication error status code for request from inference entity id [inferenceEntityId] status [401]"
)
);
MatcherAssert.assertThat(((ElasticsearchStatusException) exception.getCause()).status(), is(RestStatus.UNAUTHORIZED));
}
public void testCheckForFailureStatusCode_ThrowsFor402() {
var exception = expectThrows(RetryException.class, () -> callCheckForFailureStatusCode(402, "inferenceEntityId"));
assertFalse(exception.shouldRetry());
MatcherAssert.assertThat(exception.getCause().getMessage(), containsString("Payment required"));
MatcherAssert.assertThat(((ElasticsearchStatusException) exception.getCause()).status(), is(RestStatus.PAYMENT_REQUIRED));
}
private static void callCheckForFailureStatusCode(int statusCode, String modelId) {
callCheckForFailureStatusCode(statusCode, null, modelId);
}
private static void callCheckForFailureStatusCode(int statusCode, @Nullable String errorMessage, String modelId) {
var statusLine = mock(StatusLine.class);
when(statusLine.getStatusCode()).thenReturn(statusCode);
var httpResponse = mock(HttpResponse.class);
when(httpResponse.getStatusLine()).thenReturn(statusLine);
var header = mock(Header.class);
when(header.getElements()).thenReturn(new HeaderElement[] {});
when(httpResponse.getFirstHeader(anyString())).thenReturn(header);
String escapedErrorMessage = errorMessage != null ? errorMessage.replace("\\", "\\\\").replace("\"", "\\\"") : errorMessage;
String responseJson = Strings.format("""
{
"detail": "%s"
}
""", escapedErrorMessage);
var mockRequest = mock(Request.class);
when(mockRequest.getInferenceEntityId()).thenReturn(modelId);
var httpResult = new HttpResult(httpResponse, errorMessage == null ? new byte[] {} : responseJson.getBytes(StandardCharsets.UTF_8));
var handler = new VoyageAIResponseHandler("", (request, result) -> null);
handler.checkForFailureStatusCode(mockRequest, httpResult);
}
}
|
VoyageAIResponseHandlerTests
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/io/network/partition/TestingResultPartitionProvider.java
|
{
"start": 3349,
"end": 5274
}
|
class ____ {
private CreateSubpartitionView createSubpartitionViewFunction =
(resultPartitionID, indexSet, availabilityListener) -> null;
private CreateSubpartitionViewOrRegisterListener
createSubpartitionViewOrRegisterListenerFunction =
(partitionId, indexSet, availabilityListener, partitionRequestListener) ->
Optional.empty();
private ReleasePartitionRequestListener releasePartitionRequestListenerConsumer =
listener -> {};
public TestingResultPartitionProviderBuilder setCreateSubpartitionViewFunction(
CreateSubpartitionView createSubpartitionViewFunction) {
this.createSubpartitionViewFunction = createSubpartitionViewFunction;
return this;
}
public TestingResultPartitionProviderBuilder setCreateSubpartitionViewOrNotifyFunction(
CreateSubpartitionViewOrRegisterListener
createSubpartitionViewOrRegisterListenerFunction) {
this.createSubpartitionViewOrRegisterListenerFunction =
createSubpartitionViewOrRegisterListenerFunction;
return this;
}
public TestingResultPartitionProviderBuilder setReleasePartitionRequestListenerConsumer(
ReleasePartitionRequestListener releasePartitionRequestListenerConsumer) {
this.releasePartitionRequestListenerConsumer = releasePartitionRequestListenerConsumer;
return this;
}
public TestingResultPartitionProvider build() {
return new TestingResultPartitionProvider(
createSubpartitionViewFunction,
createSubpartitionViewOrRegisterListenerFunction,
releasePartitionRequestListenerConsumer);
}
}
/** Testing
|
TestingResultPartitionProviderBuilder
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/lock/OptimisticLockTests.java
|
{
"start": 833,
"end": 2148
}
|
class ____ {
@Test
@JiraKey(value = "HHH-9419")
public void testNoVersionCheckAfterRemove(SessionFactoryScope sessions) {
final SQLStatementInspector sqlCollector = sessions.getCollectingStatementInspector();
sqlCollector.clear();
final Lockable created = sessions.fromTransaction( (session) -> {
final Lockable entity = new Lockable( "name" );
session.persist( entity );
return entity;
} );
assertThat( created.getVersion() ).isEqualTo( 0 );
final Lockable locked = sessions.fromTransaction( (session) -> {
final ActionQueue actionQueue = session.unwrap( SessionImplementor.class ).getActionQueue();
assertThat( actionQueue.hasBeforeTransactionActions() ).isFalse();
final Lockable loaded = session.createQuery( "from Lockable", Lockable.class )
.setLockMode( LockModeType.OPTIMISTIC )
.getSingleResult();
assertThat( loaded.getVersion() ).isEqualTo( 0 );
assertThat( actionQueue.hasBeforeTransactionActions() ).isTrue();
sqlCollector.clear();
session.remove( loaded );
return loaded;
} );
assertThat( locked.getVersion() ).isEqualTo( 0 );
// should be just the deletion
assertThat( sqlCollector.getSqlQueries() ).hasSize( 1 );
assertThat( sqlCollector.getSqlQueries().get( 0 ) ).startsWith( "delete from Lockable " );
}
}
|
OptimisticLockTests
|
java
|
apache__camel
|
components/camel-test/camel-test-spring-junit5/src/main/java/org/apache/camel/test/spring/junit5/RouteCoverageEventNotifier.java
|
{
"start": 1092,
"end": 2008
}
|
class ____ extends EventNotifierSupport {
private final String testClassName;
private final Function<RouteCoverageEventNotifier, String> testMethodName;
public RouteCoverageEventNotifier(String testClassName, Function<RouteCoverageEventNotifier, String> testMethodName) {
this.testClassName = testClassName;
this.testMethodName = testMethodName;
setIgnoreCamelContextEvents(false);
setIgnoreExchangeEvents(true);
}
@Override
public boolean isEnabled(CamelEvent event) {
return event instanceof CamelContextStoppingEvent;
}
@Override
public void notify(CamelEvent event) throws Exception {
CamelContext context = ((CamelContextStoppingEvent) event).getContext();
String testName = testMethodName.apply(this);
RouteCoverageDumper.dumpRouteCoverage(context, testClassName, testName);
}
}
|
RouteCoverageEventNotifier
|
java
|
apache__dubbo
|
dubbo-metadata/dubbo-metadata-api/src/main/java/org/apache/dubbo/metadata/MetadataInfo.java
|
{
"start": 10051,
"end": 10278
}
|
interface ____ specified group, version.
* There may have several service infos of different protocols, this method will simply pick the first one.
*
* @param serviceKeyWithoutProtocol key is of format '{group}/{
|
with
|
java
|
quarkusio__quarkus
|
integration-tests/main/src/test/java/io/quarkus/it/main/RBACAccessTest.java
|
{
"start": 401,
"end": 5161
}
|
class ____ {
@Test
public void shouldRestrictAccessToSpecificRole() {
String path = "/rbac-secured/forTesterOnly";
assertForAnonymous(path, 401, Optional.empty());
assertStatusAndContent(RestAssured.given().auth().preemptive().basic("stuart", "test"), path, 403, Optional.empty());
assertStatusAndContent(RestAssured.given().auth().preemptive().basic("scott", "jb0ss"), path, 200,
Optional.of("forTesterOnly"));
}
@Test
public void shouldRestrictAccessToSpecificRoleConfigExp() {
String path = "/rbac-secured/forTesterOnlyConfigExp";
assertForAnonymous(path, 401, Optional.empty());
assertStatusAndContent(RestAssured.given().auth().preemptive().basic("stuart", "test"), path, 403, Optional.empty());
assertStatusAndContent(RestAssured.given().auth().preemptive().basic("scott", "jb0ss"), path, 200,
Optional.of("forTesterOnlyConfigExp"));
}
@Test
public void shouldRestrictAccessToSpecificRoleAndMethodParameterAnnotationsShouldntAffectAnything() {
String path = "/rbac-secured/forTesterOnlyWithMethodParamAnnotations";
assertForAnonymous(path, 401, Optional.empty());
assertStatusAndContent(RestAssured.given().auth().preemptive().basic("stuart", "test"), path, 403, Optional.empty());
assertStatusAndContent(RestAssured.given().auth().preemptive().basic("scott", "jb0ss"), path, 200,
Optional.of("forTesterOnlyWithMethodParamAnnotations"));
}
@Test
public void shouldFailToAccessForbidden() {
assertForAnonymous("/rbac-secured/denied", 401, Optional.empty());
assertForUsers("/rbac-secured/denied", 403, Optional.empty());
}
@Test
public void shouldAccessAllowed() {
assertForAnonymous("/rbac-secured/permitted", 200, Optional.of("permitted"));
assertForUsers("/rbac-secured/permitted", 200, Optional.of("permitted"));
}
@Test
public void shouldRestrictAuthenticated() {
assertForAnonymous("/rbac-secured/authenticated", 401, Optional.empty());
assertForUsers("/rbac-secured/authenticated", 200, Optional.of("authenticated"));
}
@Test
public void shouldRestrictAllRoles() {
assertForAnonymous("/rbac-secured/allRoles", 401, Optional.empty());
assertForUsers("/rbac-secured/allRoles", 200, Optional.of("allRoles"));
}
@Test
public void shouldRestrictAccessToSpecificRoleOnBean() {
String path = "/rbac-secured/callingTesterOnly";
assertForAnonymous(path, 401, Optional.empty());
assertStatusAndContent(RestAssured.given().auth().preemptive().basic("stuart", "test"), path, 403, Optional.empty());
assertStatusAndContent(RestAssured.given().auth().preemptive().basic("scott", "jb0ss"), path, 200,
Optional.of("callingTesterOnly"));
}
@Test
public void shouldFailToAccessForbiddenOnBean() {
assertForAnonymous("/rbac-secured/callingDenied", 401, Optional.empty());
assertForUsers("/rbac-secured/callingDenied", 403, Optional.empty());
}
@Test
public void shouldAccessAllowedOnBean() {
assertForAnonymous("/rbac-secured/callingPermitted", 200, Optional.of("callingPermitted"));
assertForUsers("/rbac-secured/callingPermitted", 200, Optional.of("callingPermitted"));
}
@Test
public void shouldRestrictAuthenticatedOnBean() {
assertForAnonymous("/rbac-secured/callingAuthenticated", 401, Optional.empty());
assertForUsers("/rbac-secured/callingAuthenticated", 200, Optional.of("callingAuthenticated"));
}
@Test
public void shouldRestrictAllRolesOnBean() {
assertForAnonymous("/rbac-secured/callingAllRoles", 401, Optional.empty());
assertForUsers("/rbac-secured/callingAllRoles", 200, Optional.of("callingAllRoles"));
}
private void assertForAnonymous(String path, int status, Optional<String> content) {
assertStatusAndContent(RestAssured.given(), path, status, content);
}
private void assertForUsers(String path, int status, Optional<String> content) {
assertStatusAndContent(RestAssured.given().auth().preemptive().basic("stuart", "test"), path, status, content);
assertStatusAndContent(RestAssured.given().auth().preemptive().basic("scott", "jb0ss"), path, status, content);
}
private void assertStatusAndContent(RequestSpecification request, String path, int status, Optional<String> content) {
ValidatableResponse validatableResponse = request.when().get(path)
.then()
.statusCode(status);
content.ifPresent(text -> validatableResponse.body(Matchers.equalTo(text)));
}
}
|
RBACAccessTest
|
java
|
apache__maven
|
impl/maven-impl/src/main/java/org/apache/maven/impl/cache/Cache.java
|
{
"start": 11732,
"end": 12419
}
|
class ____<T> extends RefConcurrentReference<T> {
private final T referent;
HardRefConcurrentReference(T referent, ReferenceQueue<T> queue) {
super(referent);
this.referent = referent;
// Note: queue is ignored for hard references since they're never GC'd
}
@Override
public Reference<T> getReference() {
// Return null since hard references don't use Reference objects
return null;
}
@Override
public T get() {
return referent;
}
}
// Base
|
HardRefConcurrentReference
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestAHSClient.java
|
{
"start": 3291,
"end": 7970
}
|
class ____ {
@Test
public void testClientStop() {
Configuration conf = new Configuration();
AHSClient client = AHSClient.createAHSClient();
client.init(conf);
client.start();
client.stop();
}
@Test
@Timeout(value = 10)
public void testGetApplications() throws YarnException, IOException {
Configuration conf = new Configuration();
final AHSClient client = new MockAHSClient();
client.init(conf);
client.start();
List<ApplicationReport> expectedReports =
((MockAHSClient) client).getReports();
List<ApplicationReport> reports = client.getApplications();
assertEquals(reports, expectedReports);
reports = client.getApplications();
assertThat(reports).hasSize(4);
client.stop();
}
@Test
@Timeout(value = 10)
public void testGetApplicationReport() throws YarnException, IOException {
Configuration conf = new Configuration();
final AHSClient client = new MockAHSClient();
client.init(conf);
client.start();
List<ApplicationReport> expectedReports =
((MockAHSClient) client).getReports();
ApplicationId applicationId = ApplicationId.newInstance(1234, 5);
ApplicationReport report = client.getApplicationReport(applicationId);
assertEquals(report, expectedReports.get(0));
assertEquals(report.getApplicationId().toString(), expectedReports
.get(0).getApplicationId().toString());
assertEquals(report.getSubmitTime(), expectedReports.get(0)
.getSubmitTime());
client.stop();
}
@Test
@Timeout(value = 10)
public void testGetApplicationAttempts() throws YarnException, IOException {
Configuration conf = new Configuration();
final AHSClient client = new MockAHSClient();
client.init(conf);
client.start();
ApplicationId applicationId = ApplicationId.newInstance(1234, 5);
List<ApplicationAttemptReport> reports =
client.getApplicationAttempts(applicationId);
assertNotNull(reports);
assertEquals(reports.get(0).getApplicationAttemptId(),
ApplicationAttemptId.newInstance(applicationId, 1));
assertEquals(reports.get(1).getApplicationAttemptId(),
ApplicationAttemptId.newInstance(applicationId, 2));
client.stop();
}
@Test
@Timeout(value = 10)
public void testGetApplicationAttempt() throws YarnException, IOException {
Configuration conf = new Configuration();
final AHSClient client = new MockAHSClient();
client.init(conf);
client.start();
List<ApplicationReport> expectedReports =
((MockAHSClient) client).getReports();
ApplicationId applicationId = ApplicationId.newInstance(1234, 5);
ApplicationAttemptId appAttemptId =
ApplicationAttemptId.newInstance(applicationId, 1);
ApplicationAttemptReport report =
client.getApplicationAttemptReport(appAttemptId);
assertNotNull(report);
assertEquals(report.getApplicationAttemptId().toString(),
expectedReports.get(0).getCurrentApplicationAttemptId().toString());
client.stop();
}
@Test
@Timeout(value = 10)
public void testGetContainers() throws YarnException, IOException {
Configuration conf = new Configuration();
final AHSClient client = new MockAHSClient();
client.init(conf);
client.start();
ApplicationId applicationId = ApplicationId.newInstance(1234, 5);
ApplicationAttemptId appAttemptId =
ApplicationAttemptId.newInstance(applicationId, 1);
List<ContainerReport> reports = client.getContainers(appAttemptId);
assertNotNull(reports);
assertEquals(reports.get(0).getContainerId(),
(ContainerId.newContainerId(appAttemptId, 1)));
assertEquals(reports.get(1).getContainerId(),
(ContainerId.newContainerId(appAttemptId, 2)));
client.stop();
}
@Test
@Timeout(value = 10)
public void testGetContainerReport() throws YarnException, IOException {
Configuration conf = new Configuration();
final AHSClient client = new MockAHSClient();
client.init(conf);
client.start();
List<ApplicationReport> expectedReports =
((MockAHSClient) client).getReports();
ApplicationId applicationId = ApplicationId.newInstance(1234, 5);
ApplicationAttemptId appAttemptId =
ApplicationAttemptId.newInstance(applicationId, 1);
ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
ContainerReport report = client.getContainerReport(containerId);
assertNotNull(report);
assertEquals(report.getContainerId().toString(), (ContainerId
.newContainerId(expectedReports.get(0).getCurrentApplicationAttemptId(), 1))
.toString());
client.stop();
}
private static
|
TestAHSClient
|
java
|
apache__camel
|
components/camel-swift/src/main/java/org/apache/camel/dataformat/swift/mx/ReadConfiguration.java
|
{
"start": 1174,
"end": 1422
}
|
class ____ extends MxReadConfiguration {
public TypeAdaptersConfiguration getAdapters() {
return adapters;
}
public void setAdapters(TypeAdaptersConfiguration adapters) {
this.adapters = adapters;
}
}
|
ReadConfiguration
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/DefaultSyncPreferredLocationsRetrieverTest.java
|
{
"start": 1314,
"end": 3017
}
|
class ____ {
private static final JobVertexID JV1 = new JobVertexID();
private static final ExecutionVertexID EV11 = new ExecutionVertexID(JV1, 0);
private static final ExecutionVertexID EV12 = new ExecutionVertexID(JV1, 1);
private static final ExecutionVertexID EV13 = new ExecutionVertexID(JV1, 2);
private static final ExecutionVertexID EV14 = new ExecutionVertexID(JV1, 3);
private static final ExecutionVertexID EV21 = new ExecutionVertexID(new JobVertexID(), 0);
@Test
void testAvailableInputLocationRetrieval() {
TestingInputsLocationsRetriever originalLocationRetriever =
new TestingInputsLocationsRetriever.Builder()
.connectConsumerToProducers(EV21, Arrays.asList(EV11, EV12, EV13, EV14))
.build();
originalLocationRetriever.assignTaskManagerLocation(EV11);
originalLocationRetriever.markScheduled(EV12);
originalLocationRetriever.failTaskManagerLocation(EV13, new Throwable());
originalLocationRetriever.cancelTaskManagerLocation(EV14);
SyncPreferredLocationsRetriever locationsRetriever =
new DefaultSyncPreferredLocationsRetriever(
executionVertexId -> Optional.empty(), originalLocationRetriever);
Collection<TaskManagerLocation> preferredLocations =
locationsRetriever.getPreferredLocations(EV21, Collections.emptySet());
TaskManagerLocation expectedLocation =
originalLocationRetriever.getTaskManagerLocation(EV11).get().join();
assertThat(preferredLocations).containsExactly(expectedLocation);
}
}
|
DefaultSyncPreferredLocationsRetrieverTest
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeRecovery.java
|
{
"start": 10439,
"end": 11501
}
|
class ____ extends EditLogTestSetup {
private final int paddingLength;
public EltsTestOpcodesAfterPadding(int paddingLength) {
this.paddingLength = paddingLength;
}
@Override
public void addTransactionsToLog(EditLogOutputStream elos,
OpInstanceCache cache) throws IOException {
padEditLog(elos, paddingLength);
addDeleteOpcode(elos, cache, 0, "/foo");
}
@Override
public long getLastValidTxId() {
return 0;
}
@Override
public Set<Long> getValidTxIds() {
return new HashSet<>(Arrays.asList(0L));
}
}
@Test
@Timeout(value = 180)
public void testOpcodesAfterPadding() throws IOException {
runEditLogTest(new EltsTestOpcodesAfterPadding(
EditLogFileOutputStream.MIN_PREALLOCATION_LENGTH));
}
@Test
@Timeout(value = 180)
public void testOpcodesAfterExtraPadding() throws IOException {
runEditLogTest(new EltsTestOpcodesAfterPadding(
3 * EditLogFileOutputStream.MIN_PREALLOCATION_LENGTH));
}
private static
|
EltsTestOpcodesAfterPadding
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/main/java/org/springframework/boot/context/properties/bind/BindHandler.java
|
{
"start": 1005,
"end": 4139
}
|
interface ____ {
/**
* Default no-op bind handler.
*/
BindHandler DEFAULT = new BindHandler() {
};
/**
* Called when binding of an element starts but before any result has been determined.
* @param <T> the bindable source type
* @param name the name of the element being bound
* @param target the item being bound
* @param context the bind context
* @return the actual item that should be used for binding (may be {@code null})
*/
default <T> @Nullable Bindable<T> onStart(ConfigurationPropertyName name, Bindable<T> target, BindContext context) {
return target;
}
/**
* Called when binding of an element ends with a successful result. Implementations
* may change the ultimately returned result or perform addition validation.
* @param name the name of the element being bound
* @param target the item being bound
* @param context the bind context
* @param result the bound result (never {@code null})
* @return the actual result that should be used (may be {@code null})
*/
default @Nullable Object onSuccess(ConfigurationPropertyName name, Bindable<?> target, BindContext context,
Object result) {
return result;
}
/**
* Called when binding of an element ends with an unbound result and a newly created
* instance is about to be returned. Implementations may change the ultimately
* returned result or perform addition validation.
* @param name the name of the element being bound
* @param target the item being bound
* @param context the bind context
* @param result the newly created instance (never {@code null})
* @return the actual result that should be used (must not be {@code null})
* @since 2.2.2
*/
default Object onCreate(ConfigurationPropertyName name, Bindable<?> target, BindContext context, Object result) {
return result;
}
/**
* Called when binding fails for any reason (including failures from
* {@link #onSuccess} or {@link #onCreate} calls). Implementations may choose to
* swallow exceptions and return an alternative result.
* @param name the name of the element being bound
* @param target the item being bound
* @param context the bind context
* @param error the cause of the error (if the exception stands it may be re-thrown)
* @return the actual result that should be used (may be {@code null}).
* @throws Exception if the binding isn't valid
*/
default @Nullable Object onFailure(ConfigurationPropertyName name, Bindable<?> target, BindContext context,
Exception error) throws Exception {
throw error;
}
/**
* Called when binding finishes with either bound or unbound result. This method will
* not be called when binding failed, even if a handler returns a result from
* {@link #onFailure}.
* @param name the name of the element being bound
* @param target the item being bound
* @param context the bind context
* @param result the bound result (may be {@code null})
* @throws Exception if the binding isn't valid
*/
default void onFinish(ConfigurationPropertyName name, Bindable<?> target, BindContext context,
@Nullable Object result) throws Exception {
}
}
|
BindHandler
|
java
|
apache__flink
|
flink-clients/src/main/java/org/apache/flink/client/deployment/application/ApplicationRunner.java
|
{
"start": 1300,
"end": 1451
}
|
interface ____ access to the cluster's {@link DispatcherGateway}, and it does not go
* through the publicly exposed REST API.
*/
@Internal
public
|
assumes
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/testkit/AlwaysEqualComparator.java
|
{
"start": 755,
"end": 1429
}
|
class ____<T> implements Comparator<T> {
public static final AlwaysEqualComparator<Object> ALWAYS_EQUALS = alwaysEqual();
public static final AlwaysEqualComparator<String> ALWAYS_EQUALS_STRING = alwaysEqual();
public static final AlwaysEqualComparator<Timestamp> ALWAYS_EQUALS_TIMESTAMP = alwaysEqual();
public static final AlwaysEqualComparator<Tuple> ALWAYS_EQUALS_TUPLE = alwaysEqual();
@Override
public int compare(T o1, T o2) {
return 0;
}
@Override
public String toString() {
return "AlwaysEqualComparator";
}
public static <T> AlwaysEqualComparator<T> alwaysEqual() {
return new AlwaysEqualComparator<>();
}
}
|
AlwaysEqualComparator
|
java
|
apache__camel
|
components/camel-hl7/src/main/java/org/apache/camel/component/hl7/HL7MLLPNettyDecoder.java
|
{
"start": 1192,
"end": 3715
}
|
class ____ extends DelimiterBasedFrameDecoder {
private static final Logger LOG = LoggerFactory.getLogger(HL7MLLPNettyDecoder.class);
private static final int MAX_FRAME_LENGTH = Integer.MAX_VALUE;
private final HL7MLLPConfig config;
/**
* Creates a decoder instance using a default HL7MLLPConfig
*/
HL7MLLPNettyDecoder() {
this(new HL7MLLPConfig());
}
/**
* Creates a decoder instance
*
* @param config HL7MLLPConfig to be used for decoding
* @throws java.lang.NullPointerException is config is null
*/
HL7MLLPNettyDecoder(HL7MLLPConfig config) {
super(MAX_FRAME_LENGTH, true, Unpooled.copiedBuffer(
new char[] { config.getEndByte1(), config.getEndByte2() },
Charset.defaultCharset()));
this.config = config;
}
@Override
protected Object decode(ChannelHandlerContext ctx, ByteBuf buffer) throws Exception {
ByteBuf buf = (ByteBuf) super.decode(ctx, buffer);
if (buf != null) {
try {
int pos = buf.bytesBefore((byte) config.getStartByte());
if (pos >= 0) {
ByteBuf msg = buf.readerIndex(pos + 1).slice();
LOG.debug("Message ends with length {}", msg.readableBytes());
return config.isProduceString() ? asString(msg) : asByteArray(msg);
} else {
throw new DecoderException("Did not find start byte " + (int) config.getStartByte());
}
} finally {
// We need to release the buf here to avoid the memory leak
buf.release();
}
}
// Message not complete yet - return null to be called again
LOG.debug("No complete messages yet at position {}", buffer.readableBytes());
return null;
}
private byte[] asByteArray(ByteBuf msg) {
byte[] bytes = new byte[msg.readableBytes()];
msg.getBytes(0, bytes);
if (config.isConvertLFtoCR()) {
for (int i = 0; i < bytes.length; i++) {
if (bytes[i] == (byte) '\n') {
bytes[i] = (byte) '\r';
}
}
}
return bytes;
}
private String asString(ByteBuf msg) {
String s = msg.toString(config.getCharset());
if (config.isConvertLFtoCR()) {
return s.replace('\n', '\r');
}
return s;
}
}
|
HL7MLLPNettyDecoder
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/query/CorrelatedListJoinInSubqueryTest.java
|
{
"start": 3442,
"end": 3676
}
|
class ____ {
@Id
@GeneratedValue
private Long id;
private String uniqueKey;
public Entity1() {
}
public Entity1(String uniqueKey) {
this.uniqueKey = uniqueKey;
}
}
@Entity( name = "Entity2" )
public static
|
Entity1
|
java
|
apache__dubbo
|
dubbo-common/src/main/java/org/apache/dubbo/common/threadpool/MemoryLimitedLinkedBlockingQueue.java
|
{
"start": 1124,
"end": 3514
}
|
class ____<E> extends LinkedBlockingQueue<E> {
private static final long serialVersionUID = 1374792064759926198L;
private final MemoryLimiter memoryLimiter;
public MemoryLimitedLinkedBlockingQueue(Instrumentation inst) {
this(Integer.MAX_VALUE, inst);
}
public MemoryLimitedLinkedBlockingQueue(long memoryLimit, Instrumentation inst) {
super(Integer.MAX_VALUE);
this.memoryLimiter = new MemoryLimiter(memoryLimit, inst);
}
public MemoryLimitedLinkedBlockingQueue(Collection<? extends E> c, long memoryLimit, Instrumentation inst) {
super(c);
this.memoryLimiter = new MemoryLimiter(memoryLimit, inst);
}
public void setMemoryLimit(long memoryLimit) {
memoryLimiter.setMemoryLimit(memoryLimit);
}
public long getMemoryLimit() {
return memoryLimiter.getMemoryLimit();
}
public long getCurrentMemory() {
return memoryLimiter.getCurrentMemory();
}
public long getCurrentRemainMemory() {
return memoryLimiter.getCurrentRemainMemory();
}
@Override
public void put(E e) throws InterruptedException {
memoryLimiter.acquireInterruptibly(e);
super.put(e);
}
@Override
public boolean offer(E e, long timeout, TimeUnit unit) throws InterruptedException {
return memoryLimiter.acquire(e, timeout, unit) && super.offer(e, timeout, unit);
}
@Override
public boolean offer(E e) {
return memoryLimiter.acquire(e) && super.offer(e);
}
@Override
public E take() throws InterruptedException {
final E e = super.take();
memoryLimiter.releaseInterruptibly(e);
return e;
}
@Override
public E poll(long timeout, TimeUnit unit) throws InterruptedException {
final E e = super.poll(timeout, unit);
memoryLimiter.releaseInterruptibly(e, timeout, unit);
return e;
}
@Override
public E poll() {
final E e = super.poll();
memoryLimiter.release(e);
return e;
}
@Override
public boolean remove(Object o) {
final boolean success = super.remove(o);
if (success) {
memoryLimiter.release(o);
}
return success;
}
@Override
public void clear() {
super.clear();
memoryLimiter.clear();
}
}
|
MemoryLimitedLinkedBlockingQueue
|
java
|
netty__netty
|
transport/src/main/java/io/netty/channel/group/ChannelGroupFuture.java
|
{
"start": 4841,
"end": 7054
}
|
interface ____ extends Future<Void>, Iterable<ChannelFuture> {
/**
* Returns the {@link ChannelGroup} which is associated with this future.
*/
ChannelGroup group();
/**
* Returns the {@link ChannelFuture} of the individual I/O operation which
* is associated with the specified {@link Channel}.
*
* @return the matching {@link ChannelFuture} if found.
* {@code null} otherwise.
*/
ChannelFuture find(Channel channel);
/**
* Returns {@code true} if and only if all I/O operations associated with
* this future were successful without any failure.
*/
@Override
boolean isSuccess();
@Override
ChannelGroupException cause();
/**
* Returns {@code true} if and only if the I/O operations associated with
* this future were partially successful with some failure.
*/
boolean isPartialSuccess();
/**
* Returns {@code true} if and only if the I/O operations associated with
* this future have failed partially with some success.
*/
boolean isPartialFailure();
@Override
ChannelGroupFuture addListener(GenericFutureListener<? extends Future<? super Void>> listener);
@Override
ChannelGroupFuture addListeners(GenericFutureListener<? extends Future<? super Void>>... listeners);
@Override
ChannelGroupFuture removeListener(GenericFutureListener<? extends Future<? super Void>> listener);
@Override
ChannelGroupFuture removeListeners(GenericFutureListener<? extends Future<? super Void>>... listeners);
@Override
ChannelGroupFuture await() throws InterruptedException;
@Override
ChannelGroupFuture awaitUninterruptibly();
@Override
ChannelGroupFuture syncUninterruptibly();
@Override
ChannelGroupFuture sync() throws InterruptedException;
/**
* Returns the {@link Iterator} that enumerates all {@link ChannelFuture}s
* which are associated with this future. Please note that the returned
* {@link Iterator} is unmodifiable, which means a {@link ChannelFuture}
* cannot be removed from this future.
*/
@Override
Iterator<ChannelFuture> iterator();
}
|
ChannelGroupFuture
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockRecovery.java
|
{
"start": 30266,
"end": 31403
}
|
class ____ {
final Semaphore sem;
final AtomicBoolean gotInterruption = new AtomicBoolean(false);
TestStopWorkerSemaphore() {
this.sem = new Semaphore(0);
}
/**
* Attempt to acquire a sempahore within a given timeout.
*
* This is useful for unit tests where we need to ignore InterruptedException
* when attempting to take a semaphore, but still want to honor the overall
* test timeout.
*
* @param timeoutMs The timeout in miliseconds.
*/
private void uninterruptiblyAcquire(long timeoutMs) throws Exception {
long startTimeMs = Time.monotonicNow();
while (true) {
long remTime = startTimeMs + timeoutMs - Time.monotonicNow();
if (remTime < 0) {
throw new RuntimeException("Failed to acquire the semaphore within " +
timeoutMs + " milliseconds.");
}
try {
if (sem.tryAcquire(1, remTime, TimeUnit.MILLISECONDS)) {
return;
}
} catch (InterruptedException e) {
gotInterruption.set(true);
}
}
}
}
private
|
TestStopWorkerSemaphore
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/StatementSwitchToExpressionSwitchTest.java
|
{
"start": 92179,
"end": 92824
}
|
class ____ {
public int foo(Suit suit) {
var a = 0;
String b = "b";
int y = 0, x;
switch (suit) {
case HEART:
case DIAMOND:
x = ((y + 1) * (y * y)) << 1;
break;
case SPADE:
throw new RuntimeException();
default:
throw new NullPointerException();
}
return x;
}
}
""")
.addOutputLines(
"Test.java",
"""
|
Test
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java
|
{
"start": 1859,
"end": 10050
}
|
class ____ extends AggregatorTestCase {
public void testEmpty() throws Exception {
try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder("my_agg").field("field").wrapLongitude(false);
MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field");
try (IndexReader reader = w.getReader()) {
InternalGeoBounds bounds = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType));
assertTrue(Double.isInfinite(bounds.top));
assertTrue(Double.isInfinite(bounds.bottom));
assertTrue(Double.isInfinite(bounds.posLeft));
assertTrue(Double.isInfinite(bounds.posRight));
assertTrue(Double.isInfinite(bounds.negLeft));
assertTrue(Double.isInfinite(bounds.negRight));
assertFalse(AggregationInspectionHelper.hasValue(bounds));
}
}
}
public void testUnmappedFieldWithDocs() throws Exception {
try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
if (randomBoolean()) {
Document doc = new Document();
doc.add(new LatLonDocValuesField("field", 0.0, 0.0));
w.addDocument(doc);
}
GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder("my_agg").field("non_existent").wrapLongitude(false);
MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field");
try (IndexReader reader = w.getReader()) {
InternalGeoBounds bounds = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType));
assertTrue(Double.isInfinite(bounds.top));
assertTrue(Double.isInfinite(bounds.bottom));
assertTrue(Double.isInfinite(bounds.posLeft));
assertTrue(Double.isInfinite(bounds.posRight));
assertTrue(Double.isInfinite(bounds.negLeft));
assertTrue(Double.isInfinite(bounds.negRight));
assertFalse(AggregationInspectionHelper.hasValue(bounds));
}
}
}
public void testMissing() throws Exception {
try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
Document doc = new Document();
doc.add(new NumericDocValuesField("not_field", 1000L));
w.addDocument(doc);
MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field");
Point point = GeometryTestUtils.randomPoint(false);
double lon = GeoEncodingUtils.decodeLongitude(GeoEncodingUtils.encodeLongitude(point.getX()));
double lat = GeoEncodingUtils.decodeLatitude(GeoEncodingUtils.encodeLatitude(point.getY()));
// valid missing values
for (Object missingVal : List.of("POINT(" + lon + " " + lat + ")", lat + ", " + lon, new GeoPoint(lat, lon))) {
GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder("my_agg").field("field")
.missing(missingVal)
.wrapLongitude(false);
try (IndexReader reader = w.getReader()) {
InternalGeoBounds bounds = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType));
assertThat(bounds.top, equalTo(lat));
assertThat(bounds.bottom, equalTo(lat));
assertThat(bounds.posLeft, equalTo(lon >= 0 ? lon : Double.POSITIVE_INFINITY));
assertThat(bounds.posRight, equalTo(lon >= 0 ? lon : Double.NEGATIVE_INFINITY));
assertThat(bounds.negLeft, equalTo(lon >= 0 ? Double.POSITIVE_INFINITY : lon));
assertThat(bounds.negRight, equalTo(lon >= 0 ? Double.NEGATIVE_INFINITY : lon));
}
}
}
}
public void testInvalidMissing() throws Exception {
try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
Document doc = new Document();
doc.add(new NumericDocValuesField("not_field", 1000L));
w.addDocument(doc);
MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field");
GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder("my_agg").field("field")
.missing("invalid")
.wrapLongitude(false);
try (IndexReader reader = w.getReader()) {
ElasticsearchParseException exception = expectThrows(ElasticsearchParseException.class, () -> {
searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType));
});
assertThat(exception.getMessage(), startsWith("unsupported symbol"));
}
}
}
public void testRandom() throws Exception {
double top = Double.NEGATIVE_INFINITY;
double bottom = Double.POSITIVE_INFINITY;
double posLeft = Double.POSITIVE_INFINITY;
double posRight = Double.NEGATIVE_INFINITY;
double negLeft = Double.POSITIVE_INFINITY;
double negRight = Double.NEGATIVE_INFINITY;
int numDocs = randomIntBetween(50, 100);
try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
for (int i = 0; i < numDocs; i++) {
Document doc = new Document();
int numValues = randomIntBetween(1, 5);
for (int j = 0; j < numValues; j++) {
GeoPoint point = RandomGeoGenerator.randomPoint(random());
if (point.getLat() > top) {
top = point.getLat();
}
if (point.getLat() < bottom) {
bottom = point.getLat();
}
if (point.getLon() >= 0 && point.getLon() < posLeft) {
posLeft = point.getLon();
}
if (point.getLon() >= 0 && point.getLon() > posRight) {
posRight = point.getLon();
}
if (point.getLon() < 0 && point.getLon() < negLeft) {
negLeft = point.getLon();
}
if (point.getLon() < 0 && point.getLon() > negRight) {
negRight = point.getLon();
}
doc.add(new LatLonDocValuesField("field", point.getLat(), point.getLon()));
}
w.addDocument(doc);
}
GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder("my_agg").field("field").wrapLongitude(false);
MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field");
try (IndexReader reader = w.getReader()) {
InternalGeoBounds bounds = searchAndReduce(reader, new AggTestConfig(aggBuilder, fieldType));
assertThat(bounds.top, closeTo(top, GEOHASH_TOLERANCE));
assertThat(bounds.bottom, closeTo(bottom, GEOHASH_TOLERANCE));
assertThat(bounds.posLeft, closeTo(posLeft, GEOHASH_TOLERANCE));
assertThat(bounds.posRight, closeTo(posRight, GEOHASH_TOLERANCE));
assertThat(bounds.negRight, closeTo(negRight, GEOHASH_TOLERANCE));
assertThat(bounds.negLeft, closeTo(negLeft, GEOHASH_TOLERANCE));
assertTrue(AggregationInspectionHelper.hasValue(bounds));
}
}
}
@Override
protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) {
return new GeoBoundsAggregationBuilder("foo").field(fieldName);
}
@Override
protected List<ValuesSourceType> getSupportedValuesSourceTypes() {
return List.of(CoreValuesSourceType.GEOPOINT);
}
}
|
GeoBoundsAggregatorTests
|
java
|
apache__camel
|
components/camel-ftp/src/test/java/org/apache/camel/component/file/remote/integration/FtpShutdownCompleteCurrentTaskOnlyIT.java
|
{
"start": 1186,
"end": 3261
}
|
class ____ extends FtpServerTestSupport {
private String getFtpUrl() {
return "ftp://admin@localhost:{{ftp.server.port}}/pending?password=admin&initialDelay=5000";
}
@Override
public void doPostSetup() throws Exception {
prepareFtpServer();
}
private void prepareFtpServer() {
// prepares the FTP Server by creating files on the server that we want
// to unit
String ftpUrl = "ftp://admin@localhost:{{ftp.server.port}}/pending/?password=admin";
template.sendBodyAndHeader(ftpUrl, "A", Exchange.FILE_NAME, "a.txt");
template.sendBodyAndHeader(ftpUrl, "B", Exchange.FILE_NAME, "b.txt");
template.sendBodyAndHeader(ftpUrl, "C", Exchange.FILE_NAME, "c.txt");
template.sendBodyAndHeader(ftpUrl, "D", Exchange.FILE_NAME, "d.txt");
template.sendBodyAndHeader(ftpUrl, "E", Exchange.FILE_NAME, "e.txt");
}
@Test
public void testShutdownCompleteCurrentTaskOnly() throws Exception {
// give it 20 seconds to shutdown
context.getShutdownStrategy().setTimeout(20);
MockEndpoint bar = getMockEndpoint("mock:bar");
bar.expectedMinimumMessageCount(1);
Thread.sleep(50);
MockEndpoint.assertIsSatisfied(context);
// shutdown during processing
context.stop();
// should NOT route all 5
assertTrue(bar.getReceivedCounter() < 5, "Should NOT complete all messages, was: " + bar.getReceivedCounter());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from(getFtpUrl()).routeId("route1")
// let it complete only current task so we shutdown faster
.shutdownRunningTask(ShutdownRunningTask.CompleteCurrentTaskOnly).delay(1000).syncDelayed()
.to("seda:foo");
from("seda:foo").routeId("route2").to("mock:bar");
}
};
}
}
|
FtpShutdownCompleteCurrentTaskOnlyIT
|
java
|
spring-projects__spring-framework
|
spring-web/src/main/java/org/springframework/web/service/invoker/RequestHeaderArgumentResolver.java
|
{
"start": 1867,
"end": 2576
}
|
class ____ extends AbstractNamedValueArgumentResolver {
public RequestHeaderArgumentResolver(ConversionService conversionService) {
super(conversionService);
}
@Override
protected @Nullable NamedValueInfo createNamedValueInfo(MethodParameter parameter) {
RequestHeader annot = parameter.getParameterAnnotation(RequestHeader.class);
return (annot == null ? null :
new NamedValueInfo(annot.name(), annot.required(), annot.defaultValue(), "request header", true));
}
@Override
protected void addRequestValue(
String name, Object value, MethodParameter parameter, HttpRequestValues.Builder requestValues) {
requestValues.addHeader(name, (String) value);
}
}
|
RequestHeaderArgumentResolver
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/RedisPublisher.java
|
{
"start": 25957,
"end": 26721
}
|
class ____<T> extends StreamingOutput.Subscriber<T> {
private final StreamingOutput.Subscriber<T> first;
private final StreamingOutput.Subscriber<T> second;
public CompositeSubscriber(StreamingOutput.Subscriber<T> first, StreamingOutput.Subscriber<T> second) {
this.first = first;
this.second = second;
}
@Override
public void onNext(T t) {
throw new UnsupportedOperationException();
}
@Override
public void onNext(Collection<T> outputTarget, T t) {
first.onNext(outputTarget, t);
second.onNext(outputTarget, t);
}
}
/**
* Lettuce-specific interface.
*
* @param <T>
*/
|
CompositeSubscriber
|
java
|
spring-projects__spring-framework
|
spring-web/src/test/java/org/springframework/http/converter/json/Jackson2ObjectMapperFactoryBeanTests.java
|
{
"start": 15264,
"end": 15741
}
|
class ____ extends Module {
@Override
public String getModuleName() {
return getClass().getSimpleName();
}
@Override
public Version version() {
return Version.unknownVersion();
}
@Override
public void setupModule(SetupContext context) {
SimpleSerializers serializers = new SimpleSerializers();
serializers.addSerializer(Integer.class, new CustomIntegerSerializer());
context.addSerializers(serializers);
}
}
public static
|
CustomIntegerModule
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/operators/CombineTaskTest.java
|
{
"start": 2340,
"end": 8110
}
|
class ____
extends UnaryOperatorTestBase<
RichGroupReduceFunction<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>>,
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>> {
private static final long COMBINE_MEM = 3 * 1024 * 1024;
private final double combine_frac;
private final ArrayList<Tuple2<Integer, Integer>> outList = new ArrayList<>();
@SuppressWarnings("unchecked")
private final TypeSerializer<Tuple2<Integer, Integer>> serializer =
new TupleSerializer<>(
(Class<Tuple2<Integer, Integer>>) (Class<?>) Tuple2.class,
new TypeSerializer<?>[] {IntSerializer.INSTANCE, IntSerializer.INSTANCE});
private final TypeComparator<Tuple2<Integer, Integer>> comparator =
new TupleComparator<>(
new int[] {0},
new TypeComparator<?>[] {new IntComparator(true)},
new TypeSerializer<?>[] {IntSerializer.INSTANCE});
CombineTaskTest(ExecutionConfig config) {
super(config, COMBINE_MEM, 0);
combine_frac = (double) COMBINE_MEM / this.getMemoryManager().getMemorySize();
}
@TestTemplate
void testCombineTask() {
try {
int keyCnt = 100;
int valCnt = 20;
setInput(new UniformIntTupleGenerator(keyCnt, valCnt, false), serializer);
addDriverComparator(this.comparator);
addDriverComparator(this.comparator);
setOutput(this.outList, serializer);
getTaskConfig().setDriverStrategy(DriverStrategy.SORTED_GROUP_COMBINE);
getTaskConfig().setRelativeMemoryDriver(combine_frac);
getTaskConfig().setFilehandlesDriver(2);
final GroupReduceCombineDriver<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>>
testTask = new GroupReduceCombineDriver<>();
testDriver(testTask, MockCombiningReduceStub.class);
int expSum = 0;
for (int i = 1; i < valCnt; i++) {
expSum += i;
}
assertThat(this.outList).hasSize(keyCnt);
for (Tuple2<Integer, Integer> record : this.outList) {
assertThat(record.f1).isEqualTo(expSum);
}
this.outList.clear();
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
@TestTemplate
void testFailingCombineTask() {
try {
int keyCnt = 100;
int valCnt = 20;
setInput(new UniformIntTupleGenerator(keyCnt, valCnt, false), serializer);
addDriverComparator(this.comparator);
addDriverComparator(this.comparator);
setOutput(new DiscardingOutputCollector<Tuple2<Integer, Integer>>());
getTaskConfig().setDriverStrategy(DriverStrategy.SORTED_GROUP_COMBINE);
getTaskConfig().setRelativeMemoryDriver(combine_frac);
getTaskConfig().setFilehandlesDriver(2);
final GroupReduceCombineDriver<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>>
testTask = new GroupReduceCombineDriver<>();
assertThatThrownBy(() -> testDriver(testTask, MockFailingCombiningReduceStub.class))
.isInstanceOf(ExpectedTestException.class);
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
@TestTemplate
void testCancelCombineTaskSorting() {
try {
MutableObjectIterator<Tuple2<Integer, Integer>> slowInfiniteInput =
new DelayingIterator<>(new InfiniteIntTupleIterator(), 1);
setInput(slowInfiniteInput, serializer);
addDriverComparator(this.comparator);
addDriverComparator(this.comparator);
setOutput(new DiscardingOutputCollector<Tuple2<Integer, Integer>>());
getTaskConfig().setDriverStrategy(DriverStrategy.SORTED_GROUP_COMBINE);
getTaskConfig().setRelativeMemoryDriver(combine_frac);
getTaskConfig().setFilehandlesDriver(2);
final GroupReduceCombineDriver<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>>
testTask = new GroupReduceCombineDriver<>();
Thread taskRunner =
new Thread() {
@Override
public void run() {
try {
testDriver(testTask, MockFailingCombiningReduceStub.class);
} catch (Exception e) {
// exceptions may happen during canceling
}
}
};
taskRunner.start();
// give the task some time
Thread.sleep(500);
// cancel
testTask.cancel();
// make sure it reacts to the canceling in some time
long deadline = System.currentTimeMillis() + 10000;
do {
taskRunner.interrupt();
taskRunner.join(5000);
} while (taskRunner.isAlive() && System.currentTimeMillis() < deadline);
assertThat(taskRunner.isAlive())
.withFailMessage("Task did not cancel properly within in 10 seconds.")
.isFalse();
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
// ------------------------------------------------------------------------
// Test Combiners
// ------------------------------------------------------------------------
public static
|
CombineTaskTest
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.