language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | google__gson | gson/src/main/java/com/google/gson/Gson.java | {
"start": 46199,
"end": 47129
} | class ____ a generic type since it will not have the generic
* type information because of the Type Erasure feature of Java. Therefore, this method should not
* be used if the desired type is a generic type. Note that this method works fine if any of the
* fields of the specified object are generics, just the object itself should not be a generic
* type. For the cases when the object is of generic type, invoke {@link #fromJson(String,
* TypeToken)}. If you have the JSON in a {@link Reader} instead of a String, use {@link
* #fromJson(Reader, Class)} instead.
*
* <p>An exception is thrown if the JSON string has multiple top-level JSON elements, or if there
* is trailing data. Use {@link #fromJson(JsonReader, Type)} if this behavior is not desired.
*
* @param <T> the type of the desired object
* @param json the string from which the object is to be deserialized
* @param classOfT the | is |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/schemaupdate/SchemaCreationToOutputScriptTest.java | {
"start": 5307,
"end": 5426
} | class ____ {
@Id
private Long id;
private String name;
}
@Entity(name = "MySecondEntity")
public static | MyEntity |
java | FasterXML__jackson-core | src/test/java/tools/jackson/core/unittest/write/ArrayGenerationTest.java | {
"start": 769,
"end": 10221
} | class ____ extends JacksonCoreTestBase
{
// 17-Sep-2024, tatu: [core#223] change to surrogates, let's use old behavior
// for now for simpler testing
private final JsonFactory FACTORY = streamFactoryBuilder()
.disable(JsonWriteFeature.COMBINE_UNICODE_SURROGATES_IN_UTF8)
.build();
protected TokenStreamFactory jsonFactory() {
return FACTORY;
}
@Test
void intArray() throws Exception
{
_testIntArray(false);
_testIntArray(true);
}
@Test
void longArray() throws Exception
{
_testLongArray(false);
_testLongArray(true);
}
@Test
void doubleArray() throws Exception
{
_testDoubleArray(false);
_testDoubleArray(true);
}
@Test
void stringArray() throws Exception
{
_testStringArray(false);
_testStringArray(true);
}
private void _testIntArray(boolean useBytes) throws Exception {
// first special cases of 0, 1 values
_testIntArray(useBytes, 0, 0, 0);
_testIntArray(useBytes, 0, 1, 1);
_testIntArray(useBytes, 1, 0, 0);
_testIntArray(useBytes, 1, 1, 1);
// and then some bigger data
_testIntArray(useBytes, 15, 0, 0);
_testIntArray(useBytes, 15, 2, 3);
_testIntArray(useBytes, 39, 0, 0);
_testIntArray(useBytes, 39, 4, 0);
_testIntArray(useBytes, 271, 0, 0);
_testIntArray(useBytes, 271, 0, 4);
_testIntArray(useBytes, 5009, 0, 0);
_testIntArray(useBytes, 5009, 0, 1);
}
private void _testLongArray(boolean useBytes) throws Exception {
// first special cases of 0, 1 values
_testLongArray(useBytes, 0, 0, 0);
_testLongArray(useBytes, 0, 1, 1);
_testLongArray(useBytes, 1, 0, 0);
_testLongArray(useBytes, 1, 1, 1);
// and then some bigger data
_testLongArray(useBytes, 15, 0, 0);
_testLongArray(useBytes, 15, 2, 3);
_testLongArray(useBytes, 39, 0, 0);
_testLongArray(useBytes, 39, 4, 0);
_testLongArray(useBytes, 271, 0, 0);
_testLongArray(useBytes, 271, 0, 4);
_testLongArray(useBytes, 5009, 0, 0);
_testLongArray(useBytes, 5009, 0, 1);
}
private void _testDoubleArray(boolean useBytes) throws Exception {
// first special cases of 0, 1 values
_testDoubleArray(useBytes, 0, 0, 0);
_testDoubleArray(useBytes, 0, 1, 1);
_testDoubleArray(useBytes, 1, 0, 0);
_testDoubleArray(useBytes, 1, 1, 1);
// and then some bigger data
_testDoubleArray(useBytes, 15, 0, 0);
_testDoubleArray(useBytes, 15, 2, 3);
_testDoubleArray(useBytes, 39, 0, 0);
_testDoubleArray(useBytes, 39, 4, 0);
_testDoubleArray(useBytes, 271, 0, 0);
_testDoubleArray(useBytes, 271, 0, 4);
_testDoubleArray(useBytes, 5009, 0, 0);
_testDoubleArray(useBytes, 5009, 0, 1);
}
private void _testStringArray(boolean useBytes) throws Exception {
// first special cases of 0, 1 values
_testStringArray(useBytes, 0, 0, 0);
_testStringArray(useBytes, 0, 1, 1);
_testStringArray(useBytes, 1, 0, 0);
_testStringArray(useBytes, 1, 1, 1);
// and then some bigger data
_testStringArray(useBytes, 15, 0, 0);
_testStringArray(useBytes, 15, 2, 3);
_testStringArray(useBytes, 39, 0, 0);
_testStringArray(useBytes, 39, 4, 0);
_testStringArray(useBytes, 271, 0, 0);
_testStringArray(useBytes, 271, 0, 4);
_testStringArray(useBytes, 5009, 0, 0);
_testStringArray(useBytes, 5009, 0, 1);
}
private void _testIntArray(boolean useBytes, int elements, int pre, int post) throws Exception
{
int[] values = new int[elements+pre+post];
for (int i = pre, end = pre+elements; i < end; ++i) {
values[i] = i-pre;
}
StringWriter sw = new StringWriter();
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
try (JsonGenerator gen = _generator(jsonFactory(), useBytes, bytes, sw)) {
gen.writeArray(values, pre, elements);
}
String json = useBytes ? bytes.toString("UTF-8") : sw.toString();
try (JsonParser p = _parser(jsonFactory(), useBytes, json)) {
assertToken(JsonToken.START_ARRAY, p.nextToken());
for (int i = 0; i < elements; ++i) {
if ((i & 1) == 0) { // alternate
JsonToken t = p.nextToken();
if (t != JsonToken.VALUE_NUMBER_INT) {
fail("Expected number, got "+t+", element #"+i);
}
assertEquals(i, p.getIntValue());
} else {
assertEquals(i, p.nextIntValue(-1));
}
}
assertToken(JsonToken.END_ARRAY, p.nextToken());
}
}
private void _testLongArray(boolean useBytes, int elements, int pre, int post) throws Exception
{
long[] values = new long[elements+pre+post];
for (int i = pre, end = pre+elements; i < end; ++i) {
values[i] = i-pre;
}
StringWriter sw = new StringWriter();
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
try (JsonGenerator gen = _generator(jsonFactory(), useBytes, bytes, sw)) {
gen.writeArray(values, pre, elements);
}
String json = useBytes ? bytes.toString("UTF-8") : sw.toString();
try (JsonParser p = _parser(jsonFactory(), useBytes, json)) {
assertToken(JsonToken.START_ARRAY, p.nextToken());
for (int i = 0; i < elements; ++i) {
if ((i & 1) == 0) { // alternate
JsonToken t = p.nextToken();
if (t != JsonToken.VALUE_NUMBER_INT) {
fail("Expected number, got "+t+", element #"+i);
}
assertEquals(i, p.getLongValue());
} else {
assertEquals(i, p.nextLongValue(-1));
}
}
assertToken(JsonToken.END_ARRAY, p.nextToken());
}
}
private void _testDoubleArray(boolean useBytes, int elements, int pre, int post) throws Exception
{
double[] values = new double[elements+pre+post];
for (int i = pre, end = pre+elements; i < end; ++i) {
values[i] = i-pre;
}
StringWriter sw = new StringWriter();
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
try (JsonGenerator gen = _generator(jsonFactory(), useBytes, bytes, sw)) {
gen.writeArray(values, pre, elements);
}
String json = useBytes ? bytes.toString("UTF-8") : sw.toString();
try (JsonParser p = _parser(jsonFactory(), useBytes, json)) {
assertToken(JsonToken.START_ARRAY, p.nextToken());
for (int i = 0; i < elements; ++i) {
JsonToken t = p.nextToken();
if (t != JsonToken.VALUE_NUMBER_FLOAT) {
fail("Expected floating-point number, got "+t+", element #"+i);
}
assertEquals((double) i, p.getDoubleValue());
}
assertToken(JsonToken.END_ARRAY, p.nextToken());
}
}
private void _testStringArray(boolean useBytes, int elements, int pre, int post) throws Exception
{
int byteLength = 16;
Random random = new Random();
Charset utf8 = Charset.forName("UTF-8");
String[] values = new String[elements+pre+post];
for (int i = pre, end = pre+elements; i < end; ++i) {
byte[] content = new byte[byteLength];
random.nextBytes(content);
values[i] = new String(content, utf8);
}
StringWriter sw = new StringWriter();
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
try (JsonGenerator gen = _generator(jsonFactory(), useBytes, bytes, sw)) {
gen.writeArray(values, pre, elements);
}
String json = useBytes ? bytes.toString("UTF-8") : sw.toString();
try (JsonParser p = _parser(jsonFactory(), useBytes, json)) {
assertToken(JsonToken.START_ARRAY, p.nextToken());
for (int i = 0; i < elements; ++i) {
JsonToken t = p.nextToken();
if (t != JsonToken.VALUE_STRING) {
fail("Expected string, got "+t+", element #"+i);
}
assertEquals(values[pre+i], p.getValueAsString());
}
assertToken(JsonToken.END_ARRAY, p.nextToken());
}
}
private JsonGenerator _generator(TokenStreamFactory f, boolean useBytes,
ByteArrayOutputStream bytes, Writer w)
throws Exception
{
if (useBytes) {
return f.createGenerator(ObjectWriteContext.empty(), bytes);
}
return f.createGenerator(ObjectWriteContext.empty(), w);
}
private JsonParser _parser(TokenStreamFactory f, boolean useBytes, String json)
throws Exception
{
if (useBytes) {
return f.createParser(ObjectReadContext.empty(),
json.getBytes(StandardCharsets.UTF_8));
}
return jsonFactory().createParser(ObjectReadContext.empty(), json);
}
}
| ArrayGenerationTest |
java | resilience4j__resilience4j | resilience4j-spring/src/test/java/io/github/resilience4j/bulkhead/configure/BulkHeadConfigurationSpringTest.java | {
"start": 5771,
"end": 6230
} | class ____ extends BulkheadConfigurationProperties {
BulkheadConfigurationPropertiesTest() {
InstanceProperties instanceProperties = new InstanceProperties();
instanceProperties.setBaseConfig("sharedConfig");
instanceProperties.setMaxConcurrentCalls(3);
getConfigs().put("sharedBackend", instanceProperties);
}
}
private | BulkheadConfigurationPropertiesTest |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/aroundconstruct/AroundConstructWithParameterChangeTest.java | {
"start": 796,
"end": 1356
} | class ____ {
@RegisterExtension
public ArcTestContainer container = new ArcTestContainer(SimpleBean.class, MyDependency.class,
MyInterceptorBinding.class, MyInterceptor.class);
@Test
public void test() {
SimpleBean simpleBean = Arc.container().instance(SimpleBean.class).get();
assertNotNull(simpleBean);
assertNotNull(simpleBean.dependency);
assertEquals("from interceptor", simpleBean.dependency.value);
}
@Singleton
@MyInterceptorBinding
static | AroundConstructWithParameterChangeTest |
java | quarkusio__quarkus | integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/KnativeGlobalRequestsPerSecondTest.java | {
"start": 559,
"end": 2091
} | class ____ {
@RegisterExtension
static final QuarkusProdModeTest config = new QuarkusProdModeTest()
.withApplicationRoot((jar) -> jar.addClasses(GreetingResource.class))
.setApplicationName("knative-global-requests-per-second")
.setApplicationVersion("0.1-SNAPSHOT")
.withConfigurationResource("knative-global-requests-per-second.properties");
@ProdBuildResults
private ProdModeTestResults prodModeTestResults;
@Test
public void assertGeneratedResources() throws IOException {
Path kubernetesDir = prodModeTestResults.getBuildDir().resolve("kubernetes");
assertThat(kubernetesDir)
.isDirectoryContaining(p -> p.getFileName().endsWith("knative.json"))
.isDirectoryContaining(p -> p.getFileName().endsWith("knative.yml"))
.satisfies(p -> assertThat(p.toFile().listFiles()).hasSize(2));
List<HasMetadata> kubernetesList = DeserializationUtil
.deserializeAsList(kubernetesDir.resolve("knative.yml"));
assertThat(kubernetesList).filteredOn(i -> "ConfigMap".equals(i.getKind())).singleElement().satisfies(c -> {
assertThat(c.getMetadata()).satisfies(m -> assertThat(m.getName()).isEqualTo("config-autoscaler"));
assertThat(c).isInstanceOfSatisfying(ConfigMap.class, m -> {
assertThat(m.getData()).contains(entry("requests-per-second-target-default", "150"));
});
});
}
}
| KnativeGlobalRequestsPerSecondTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/cache/ManyToOneTest.java | {
"start": 2506,
"end": 3233
} | class ____ {
@Id
@GeneratedValue
private Long id;
private String name;
@ManyToOne
private EntityB entityB;
public EntityA() {
}
public EntityA(String name, EntityB entityB) {
this.name = name;
this.entityB = entityB;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public EntityB getEntityB() {
return entityB;
}
public void setEntityB(EntityB entityB) {
this.entityB = entityB;
}
}
@Entity(name = "EntityB")
@BatchSize(size = 500)
@Cacheable
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
public static | EntityA |
java | apache__camel | components/camel-wal/src/main/java/org/apache/camel/component/wal/LogReader.java | {
"start": 1260,
"end": 7173
} | class ____ implements AutoCloseable {
public static final int DEFAULT_CAPACITY = 1024 * 512;
private static final Logger LOG = LoggerFactory.getLogger(LogReader.class);
private final FileChannel fileChannel;
private final ByteBuffer ioBuffer;
private final Header header;
/**
* Constructor
*
* @param logFile the report file name
* @throws IOException in case of I/O errors
*/
public LogReader(final File logFile) throws IOException {
this(logFile, DEFAULT_CAPACITY);
}
/**
* Constructor
*
* @param logFile the report file name
* @throws IOException in case of I/O errors
*/
public LogReader(final File logFile, int capacity) throws IOException {
this.fileChannel = FileChannel.open(logFile.toPath(), StandardOpenOption.READ);
ioBuffer = ByteBuffer.allocateDirect(capacity);
header = readHeader();
}
/**
* Gets the file header
*
* @return the file header
*/
public Header getHeader() {
return header;
}
/**
* Reads the header from the file
*
* @return the header or null if the file is empty
* @throws IOException in case of lower-level I/O errors
*/
private Header readHeader() throws IOException {
if (fileChannel.size() == 0) {
return null;
}
ioBuffer.clear();
int bytesRead = fileChannel.read(ioBuffer);
if (bytesRead <= 0) {
throw new IllegalArgumentException("The file does not contain a valid header");
}
LOG.trace("Read {} bytes from the file channel", bytesRead);
ioBuffer.flip();
byte[] name = new byte[Header.FORMAT_NAME_SIZE];
ioBuffer.get(name, 0, Header.FORMAT_NAME_SIZE);
LOG.trace("File format name: '{}'", new String(name));
int fileVersion = ioBuffer.getInt();
LOG.trace("File format version: '{}'", fileVersion);
return new Header(new String(name), fileVersion);
}
/**
* Read an entry from the file.
*
* @return A log entry from the file or null when reaching the end-of-file or if the file is empty
* @throws IOException if unable to read the entry
*/
public PersistedLogEntry readEntry() throws IOException {
if (header == null) {
return null;
}
logBufferInfo();
if (ioBuffer.hasRemaining()) {
return doReadEntry();
}
if (LOG.isTraceEnabled()) {
LOG.trace("Read it all from the buffer. Fetching again from the channel");
}
if (!reload()) {
return null;
}
return doReadEntry();
}
/**
* A lower-level routine to read a single entry from the transaction log
*
* @return A log entry from the file or null when reaching the end-of-file or if the file is empty
* @throws IOException if unable to read the entry
*/
private PersistedLogEntry doReadEntry() throws IOException {
if (ioBuffer.remaining() < Integer.BYTES) {
if (!reload()) {
return null;
}
}
int state = ioBuffer.getInt();
Slot keySlot = readSlot();
Slot valueSlot = readSlot();
EntryInfo entryInfo = EntryInfo.createForPersisted(fileChannel.position());
return new PersistedLogEntry(
entryInfo, LogEntry.EntryState.fromInt(state), keySlot.metadata, keySlot.data,
valueSlot.metadata, valueSlot.data);
}
/**
* Reads a data slot (i.e.: containing a key or a value)
*
* @return the data slot
* @throws IOException if the record is invalid or the data too large for the buffer
*/
private Slot readSlot() throws IOException {
Slot slot = new Slot();
// The buffer needs to have enough space for the metadata and length.
if (ioBuffer.remaining() < (Integer.BYTES * 2)) {
if (!reload()) {
throw new InvalidRecordException("A data slot within a record is incomplete or malformed");
}
}
slot.metadata = ioBuffer.getInt();
slot.length = ioBuffer.getInt();
if (ioBuffer.capacity() < slot.length) {
throw new BufferTooSmallException(ioBuffer.capacity(), slot.length);
}
if (ioBuffer.remaining() < slot.length) {
if (!reload()) {
throw new InvalidRecordException("A data slot within a record is incomplete or malformed");
}
}
slot.data = new byte[slot.length];
ioBuffer.get(slot.data);
return slot;
}
/**
* Reloads data into the intermediate buffer, compacting it on the process
*
* @return true if has read data into the buffer (reloaded) or false otherwise
* @throws IOException in case of lower-level I/O errors
*/
private boolean reload() throws IOException {
try {
ioBuffer.compact();
int read = fileChannel.read(ioBuffer);
if (read > 0) {
return true;
}
} finally {
ioBuffer.flip();
}
return false;
}
private void logBufferInfo() {
if (LOG.isTraceEnabled()) {
LOG.trace("Remaining: {}", ioBuffer.remaining());
LOG.trace("Position: {}", ioBuffer.position());
LOG.trace("Has Remaining: {}", ioBuffer.hasRemaining());
}
}
/**
* Close the reader and release resources
*/
@Override
public void close() {
try {
fileChannel.close();
} catch (IOException e) {
LOG.error(e.getMessage(), e);
}
}
/**
* A wrapper for a data slot
*/
private static | LogReader |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/inference/strategies/VaryingSequenceInputTypeStrategy.java | {
"start": 1954,
"end": 6357
} | class ____ implements InputTypeStrategy {
private final int constantArgumentCount;
private final List<ArgumentTypeStrategy> constantArgumentStrategies;
private final ArgumentTypeStrategy varyingArgumentStrategy;
private final @Nullable List<String> argumentNames;
public VaryingSequenceInputTypeStrategy(
List<ArgumentTypeStrategy> argumentStrategies, @Nullable List<String> argumentNames) {
Preconditions.checkArgument(argumentStrategies.size() > 0);
Preconditions.checkArgument(
argumentNames == null || argumentNames.size() == argumentStrategies.size());
constantArgumentCount = argumentStrategies.size() - 1;
constantArgumentStrategies = argumentStrategies.subList(0, constantArgumentCount);
varyingArgumentStrategy = argumentStrategies.get(constantArgumentCount);
this.argumentNames = argumentNames;
}
@Override
public ArgumentCount getArgumentCount() {
return ConstantArgumentCount.from(constantArgumentCount);
}
@Override
public Optional<List<DataType>> inferInputTypes(
CallContext callContext, boolean throwOnFailure) {
final List<DataType> dataTypes = callContext.getArgumentDataTypes();
if (dataTypes.size() < constantArgumentCount) {
return Optional.empty();
}
final List<DataType> inferredDataTypes = new ArrayList<>(dataTypes.size());
for (int i = 0; i < callContext.getArgumentDataTypes().size(); i++) {
final ArgumentTypeStrategy argumentTypeStrategy;
if (i < constantArgumentCount) {
argumentTypeStrategy = constantArgumentStrategies.get(i);
} else {
argumentTypeStrategy = varyingArgumentStrategy;
}
final Optional<DataType> inferredDataType =
argumentTypeStrategy.inferArgumentType(callContext, i, throwOnFailure);
if (!inferredDataType.isPresent()) {
return Optional.empty();
}
inferredDataTypes.add(inferredDataType.get());
}
return Optional.of(inferredDataTypes);
}
@Override
public List<Signature> getExpectedSignatures(FunctionDefinition definition) {
final Signature.Argument varyingArgument =
varyingArgumentStrategy.getExpectedArgument(definition, constantArgumentCount);
final Signature.Argument newArg;
final String type = varyingArgument.getType();
if (argumentNames == null) {
newArg = Signature.Argument.ofVarying(type);
} else {
newArg = Signature.Argument.ofVarying(argumentNames.get(constantArgumentCount), type);
}
final List<Signature.Argument> arguments = new ArrayList<>();
for (int i = 0; i < constantArgumentCount; i++) {
if (argumentNames == null) {
arguments.add(constantArgumentStrategies.get(i).getExpectedArgument(definition, i));
} else {
arguments.add(
Signature.Argument.of(
argumentNames.get(i),
constantArgumentStrategies
.get(i)
.getExpectedArgument(definition, i)
.getType()));
}
}
arguments.add(newArg);
return Collections.singletonList(Signature.of(arguments));
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
VaryingSequenceInputTypeStrategy that = (VaryingSequenceInputTypeStrategy) o;
return constantArgumentCount == that.constantArgumentCount
&& Objects.equals(constantArgumentStrategies, that.constantArgumentStrategies)
&& Objects.equals(varyingArgumentStrategy, that.varyingArgumentStrategy)
&& Objects.equals(argumentNames, that.argumentNames);
}
@Override
public int hashCode() {
return Objects.hash(
constantArgumentCount,
constantArgumentStrategies,
varyingArgumentStrategy,
argumentNames);
}
}
| VaryingSequenceInputTypeStrategy |
java | apache__camel | test-infra/camel-test-infra-kafka/src/main/java/org/apache/camel/test/infra/kafka/services/ContainerLocalKafkaInfraService.java | {
"start": 2579,
"end": 4362
} | class ____ extends KafkaContainer {
public TestInfraKafkaContainer(boolean fixedPort) {
super(DockerImageName.parse(System.getProperty(KafkaProperties.KAFKA_CONTAINER, KAFKA3_IMAGE_NAME))
.asCompatibleSubstituteFor("apache/kafka"));
if (fixedPort) {
addFixedExposedPort(9092, 9092);
}
// For random ports, testcontainers will handle port mapping automatically
}
}
return new TestInfraKafkaContainer(ContainerEnvironmentUtil.isFixedPort(this.getClass()));
}
public String getBootstrapServers() {
return kafka.getBootstrapServers();
}
@Override
public String brokers() {
return getBootstrapServers();
}
@Override
public void registerProperties() {
System.setProperty(KafkaProperties.KAFKA_BOOTSTRAP_SERVERS, getBootstrapServers());
}
@Override
public void initialize() {
kafka.start();
registerProperties();
LOG.info("Kafka bootstrap server running at address {}", kafka.getBootstrapServers());
}
@Override
public void shutdown() {
LOG.info("Shutting down Kafka container");
kafka.stop();
}
@Override
public KafkaContainer getContainer() {
return kafka;
}
public static ContainerLocalKafkaInfraService kafka3Container() {
KafkaContainer container
= new KafkaContainer(
DockerImageName.parse(System.getProperty(KafkaProperties.KAFKA_CONTAINER, KAFKA3_IMAGE_NAME))
.asCompatibleSubstituteFor("apache/kafka"));
return new ContainerLocalKafkaInfraService(container);
}
}
| TestInfraKafkaContainer |
java | apache__camel | components/camel-consul/src/test/java/org/apache/camel/component/consul/cluster/ConsulClusteredRoutePolicyFactoryIT.java | {
"start": 1698,
"end": 4777
} | class ____ {
@RegisterExtension
public static ConsulService service = ConsulServiceFactory.createService();
private static final Logger LOGGER = LoggerFactory.getLogger(ConsulClusteredRoutePolicyFactoryIT.class);
private static final List<String> CLIENTS = IntStream.range(0, 3).mapToObj(Integer::toString).toList();
private static final List<String> RESULTS = new ArrayList<>();
private static final ScheduledExecutorService SCHEDULER = Executors.newScheduledThreadPool(CLIENTS.size() * 2);
private static final CountDownLatch LATCH = new CountDownLatch(CLIENTS.size());
// ************************************
// Test
// ************************************
@Test
public void test() throws Exception {
for (String id : CLIENTS) {
SCHEDULER.submit(() -> run(id));
}
LATCH.await(1, TimeUnit.MINUTES);
SCHEDULER.shutdownNow();
Assertions.assertEquals(CLIENTS.size(), RESULTS.size());
Assertions.assertTrue(RESULTS.containsAll(CLIENTS));
}
// ************************************
// Run a Camel node
// ************************************
private static void run(String id) {
try {
int events = ThreadLocalRandom.current().nextInt(2, 6);
CountDownLatch contextLatch = new CountDownLatch(events);
ConsulClusterService consulClusterService = new ConsulClusterService();
consulClusterService.setId("node-" + id);
consulClusterService.setUrl(service.getConsulUrl());
LOGGER.info("Consul URL {}", consulClusterService.getUrl());
DefaultCamelContext context = new DefaultCamelContext();
context.disableJMX();
context.getCamelContextExtension().setName("context-" + id);
context.addService(consulClusterService);
context.addRoutePolicyFactory(ClusteredRoutePolicyFactory.forNamespace("my-ns"));
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("timer:consul?delay=1000&period=1000").routeId("route-" + id).log("From ${routeId}")
.process(e -> contextLatch.countDown());
}
});
// Start the context after some random time so the startup order
// changes for each test.
Awaitility.await().pollDelay(ThreadLocalRandom.current().nextInt(500), TimeUnit.MILLISECONDS)
.untilAsserted(() -> Assertions.assertDoesNotThrow(context::start));
LOGGER.info("Starting CamelContext on node: {}", id);
context.start();
LOGGER.info("Started CamelContext on node: {}", id);
contextLatch.await();
LOGGER.info("Shutting down node {}", id);
RESULTS.add(id);
context.stop();
LATCH.countDown();
} catch (Exception e) {
LOGGER.warn("{}", e.getMessage(), e);
}
}
}
| ConsulClusteredRoutePolicyFactoryIT |
java | apache__camel | core/camel-support/src/main/java/org/apache/camel/support/component/ApiMethodArg.java | {
"start": 855,
"end": 3091
} | class ____ {
private final String name;
private final Class<?> type;
private final String typeArgs;
private final String rawTypeArgs;
private final String description;
private final boolean setter;
public ApiMethodArg(String name, Class<?> type, String typeArgs, String rawTypeArgs, String description) {
this.name = name;
this.type = type;
this.typeArgs = typeArgs;
this.rawTypeArgs = rawTypeArgs;
this.description = description;
this.setter = false;
}
public ApiMethodArg(String name, Class<?> type, String typeArgs, String rawTypeArgs, String description, boolean setter) {
this.name = name;
this.type = type;
this.typeArgs = typeArgs;
this.rawTypeArgs = rawTypeArgs;
this.description = description;
this.setter = setter;
}
public String getName() {
return this.name;
}
public Class<?> getType() {
return this.type;
}
public String getTypeArgs() {
return this.typeArgs;
}
public String getRawTypeArgs() {
return rawTypeArgs;
}
public String getDescription() {
return description;
}
public boolean isSetter() {
return setter;
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder(128);
builder.append(type.getCanonicalName());
if (typeArgs != null) {
builder.append("<").append(typeArgs).append(">");
}
builder.append(" ").append(name);
return builder.toString();
}
public static ApiMethodArg arg(String name, Class<?> type) {
return new ApiMethodArg(name, type, null, null, null);
}
public static ApiMethodArg arg(String name, Class<?> type, String typeArgs) {
return new ApiMethodArg(name, type, typeArgs, null, null);
}
public static ApiMethodArg arg(String name, Class<?> type, String typeArgs, String description) {
return new ApiMethodArg(name, type, typeArgs, null, description);
}
public static ApiMethodArg setter(String name, Class<?> type) {
return new ApiMethodArg(name, type, null, null, null, true);
}
}
| ApiMethodArg |
java | apache__flink | flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/extraction/TypeInferenceExtractorTest.java | {
"start": 105229,
"end": 105424
} | class ____ implements Procedure {
public int[] call(Object procedureContext, String f1, Integer f2) {
return null;
}
}
private static | ArgumentHintOnClassProcedure |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java | {
"start": 6562,
"end": 19019
} | enum ____ implements PlanReduction {
NO_REDUCTION,
}
/** The plan here is used as a fallback if the reduce driver cannot be planned in a way that avoids field extraction after TopN. */
public record TopNReduction(PhysicalPlan plan) implements PlanReduction {}
public record ReducedPlan(PhysicalPlan plan) implements PlanReduction {}
public static PlanReduction reductionPlan(PhysicalPlan plan) {
// find the logical fragment
var fragments = plan.collectFirstChildren(p -> p instanceof FragmentExec);
if (fragments.isEmpty()) {
return SimplePlanReduction.NO_REDUCTION;
}
final FragmentExec fragment = (FragmentExec) fragments.getFirst();
// Though FORK is technically a pipeline breaker, it should never show up here.
// See also: https://github.com/elastic/elasticsearch/pull/131945/files#r2235572935
final var pipelineBreakers = fragment.fragment().collectFirstChildren(p -> p instanceof PipelineBreaker);
if (pipelineBreakers.isEmpty()) {
return SimplePlanReduction.NO_REDUCTION;
}
final LogicalPlan pipelineBreaker = pipelineBreakers.getFirst();
int estimatedRowSize = fragment.estimatedRowSize();
return switch (LocalMapper.INSTANCE.map(pipelineBreaker)) {
case TopNExec topN -> new TopNReduction(EstimatesRowSize.estimateRowSize(estimatedRowSize, topN));
case AggregateExec aggExec -> getPhysicalPlanReduction(estimatedRowSize, aggExec.withMode(AggregatorMode.INTERMEDIATE));
case PhysicalPlan p -> getPhysicalPlanReduction(estimatedRowSize, p);
};
}
private static ReducedPlan getPhysicalPlanReduction(int estimatedRowSize, PhysicalPlan plan) {
return new ReducedPlan(EstimatesRowSize.estimateRowSize(estimatedRowSize, plan));
}
public static boolean requiresSortedTimeSeriesSource(PhysicalPlan plan) {
return plan.anyMatch(e -> {
if (e instanceof FragmentExec f) {
return f.fragment().anyMatch(l -> l instanceof EsRelation r && r.indexMode() == IndexMode.TIME_SERIES);
}
return false;
});
}
public static void forEachRelation(PhysicalPlan plan, Consumer<EsRelation> action) {
plan.forEachDown(FragmentExec.class, f -> f.fragment().forEachDown(EsRelation.class, r -> {
if (r.indexMode() != IndexMode.LOOKUP) {
action.accept(r);
}
}));
}
public static PhysicalPlan localPlan(
PlannerSettings plannerSettings,
EsqlFlags flags,
List<SearchExecutionContext> searchContexts,
Configuration configuration,
FoldContext foldCtx,
PhysicalPlan plan
) {
return localPlan(plannerSettings, flags, configuration, foldCtx, plan, SearchContextStats.from(searchContexts));
}
public static PhysicalPlan localPlan(
PlannerSettings plannerSettings,
EsqlFlags flags,
Configuration configuration,
FoldContext foldCtx,
PhysicalPlan plan,
SearchStats searchStats
) {
final var logicalOptimizer = new LocalLogicalPlanOptimizer(new LocalLogicalOptimizerContext(configuration, foldCtx, searchStats));
var physicalOptimizer = new LocalPhysicalPlanOptimizer(
new LocalPhysicalOptimizerContext(plannerSettings, flags, configuration, foldCtx, searchStats)
);
return localPlan(plan, logicalOptimizer, physicalOptimizer);
}
public static PhysicalPlan integrateEsFilterIntoFragment(PhysicalPlan plan, @Nullable QueryBuilder esFilter) {
return esFilter == null ? plan : plan.transformUp(FragmentExec.class, f -> {
var fragmentFilter = f.esFilter();
// TODO: have an ESFilter and push down to EsQueryExec / EsSource
// This is an ugly hack to push the filter parameter to Lucene
// TODO: filter integration testing
var filter = fragmentFilter != null ? boolQuery().filter(fragmentFilter).must(esFilter) : esFilter;
LOGGER.debug("Fold filter {} to EsQueryExec", filter);
return f.withFilter(filter);
});
}
/**
 * Rewrites every {@link FragmentExec} in the plan into a locally optimized physical sub-plan:
 * the fragment's logical plan is optimized with {@code logicalOptimizer}, mapped to a physical
 * plan, re-filtered with any fragment-level ES filter, optimized with {@code physicalOptimizer},
 * and finally row-size-estimated. If no fragment was rewritten, the input plan is returned
 * unchanged (it is a coordinator-only plan).
 *
 * @param plan              the plan whose fragments should be localized
 * @param logicalOptimizer  the node-local logical optimizer applied to each fragment
 * @param physicalOptimizer the node-local physical optimizer applied to each mapped fragment
 * @return the localized plan, or the original plan when it contains no (rewritable) fragments
 */
public static PhysicalPlan localPlan(
    PhysicalPlan plan,
    LocalLogicalPlanOptimizer logicalOptimizer,
    LocalPhysicalPlanOptimizer physicalOptimizer
) {
    // Flipped to FALSE the first time a fragment is rewritten; if it stays TRUE the
    // plan contained no rewritable fragments and is returned as-is below.
    var isCoordPlan = new Holder<>(Boolean.TRUE);
    // Collect the right-hand children of all lookup joins up front so they can be skipped.
    Set<PhysicalPlan> lookupJoinExecRightChildren = plan.collect(LookupJoinExec.class::isInstance)
        .stream()
        .map(x -> ((LookupJoinExec) x).right())
        .collect(Collectors.toSet());
    PhysicalPlan localPhysicalPlan = plan.transformUp(FragmentExec.class, f -> {
        if (lookupJoinExecRightChildren.contains(f)) {
            // Do not optimize the right child of a lookup join exec
            // The data node does not have the right stats to perform the optimization because the stats are on the lookup node
            // Also we only ship logical plans across the network, so the plan needs to remain logical
            return f;
        }
        isCoordPlan.set(Boolean.FALSE);
        // Optimize the fragment's logical plan locally, then map it to a physical plan.
        LogicalPlan optimizedFragment = logicalOptimizer.localOptimize(f.fragment());
        PhysicalPlan physicalFragment = LocalMapper.INSTANCE.map(optimizedFragment);
        // Re-attach any fragment-level ES filter directly onto the ES source nodes.
        QueryBuilder filter = f.esFilter();
        if (filter != null) {
            physicalFragment = physicalFragment.transformUp(
                EsSourceExec.class,
                query -> new EsSourceExec(Source.EMPTY, query.indexPattern(), query.indexMode(), query.output(), filter)
            );
        }
        var localOptimized = physicalOptimizer.localOptimize(physicalFragment);
        // Carry over the fragment's row-size estimate into the optimized sub-plan.
        return EstimatesRowSize.estimateRowSize(f.estimatedRowSize(), localOptimized);
    });
    return isCoordPlan.get() ? plan : localPhysicalPlan;
}
/**
 * Extracts a filter that can be used to skip unmatched shards on the coordinator.
 */
public static QueryBuilder canMatchFilter(
    EsqlFlags flags,
    Configuration configuration,
    TransportVersion minTransportVersion,
    PhysicalPlan plan
) {
    // Only fields the coordinator rewrite context supports are eligible for can-match filtering.
    Predicate<String> supportedField = CoordinatorRewriteContext.SUPPORTED_FIELDS::contains;
    return detectFilter(flags, configuration, minTransportVersion, plan, supportedField);
}
/**
 * Collects, from the given plan, all filters over fields accepted by {@code fieldName} that can be
 * fully translated to Lucene queries, and combines them (together with any fragment-level ES
 * filters) into a single conjunctive {@link QueryBuilder}.
 *
 * Note that since this filter does not have access to SearchStats, it cannot detect if the field is a text field with a delegate.
 * We currently only use this filter for the @timestamp field, which is always a date field. Any tests that wish to use this should
 * take care to not use it with TEXT fields.
 *
 * @param flags               feature flags used to build the pushdown predicates
 * @param configuration       the query configuration (needed when wrapping plan-bearing queries)
 * @param minTransportVersion minimum transport version all nodes support; filters that cannot be
 *                            serialized at this version are skipped
 * @param plan                the plan to scan for pushable filters
 * @param fieldName           predicate selecting which field names are eligible for extraction
 * @return the combined filter, possibly {@code null}/empty when nothing was collected
 */
static QueryBuilder detectFilter(
    EsqlFlags flags,
    Configuration configuration,
    TransportVersion minTransportVersion,
    PhysicalPlan plan,
    Predicate<String> fieldName
) {
    // first position is the REST filter, the second the query filter
    final List<QueryBuilder> requestFilters = new ArrayList<>();
    final LucenePushdownPredicates ctx = LucenePushdownPredicates.forCanMatch(minTransportVersion, flags);
    plan.forEachDown(FragmentExec.class, fe -> {
        // Keep the fragment-level ES filter, but only if it can be serialized at the minimum wire version.
        if (fe.esFilter() != null && fe.esFilter().supportsVersion(minTransportVersion)) {
            requestFilters.add(fe.esFilter());
        }
        // detect filter inside the query
        fe.fragment().forEachUp(Filter.class, f -> {
            // the only filter that can be pushed down is that on top of the relation
            // reuses the logic from LocalPhysicalPlanOptimizer#PushFiltersToSource
            // but get executed on the logical plan
            List<Expression> matches = new ArrayList<>();
            if (f.child() instanceof EsRelation) {
                var conjunctions = Predicates.splitAnd(f.condition());
                // look only at expressions that contain literals and the target field
                for (var exp : conjunctions) {
                    var refsBuilder = AttributeSet.builder().addAll(exp.references());
                    // remove literals or attributes that match by name
                    boolean matchesField = refsBuilder.removeIf(e -> fieldName.test(e.name()));
                    // the expression only contains the target reference
                    // and the expression is pushable (functions can be fully translated)
                    if (matchesField
                        && refsBuilder.isEmpty()
                        && translatable(exp, ctx).finish() == TranslationAware.FinishedTranslatable.YES) {
                        matches.add(exp);
                    }
                }
            }
            if (matches.isEmpty() == false) {
                // AND the pushable conjuncts together and translate them to a Lucene query builder.
                Query qlQuery = TRANSLATOR_HANDLER.asQuery(ctx, Predicates.combineAnd(matches));
                QueryBuilder builder = qlQuery.toQueryBuilder();
                if (qlQuery.containsPlan()) {
                    // Queries that embed a plan need the configuration to (de)serialize it, so wrap them.
                    builder = new PlanStreamWrapperQueryBuilder(configuration, builder);
                }
                requestFilters.add(builder);
            }
        });
    });
    // Combine everything collected as FILTER (conjunctive) clauses.
    return Queries.combine(FILTER, requestFilters);
}
/**
 * Map QL's {@link DataType} to the compute engine's {@link ElementType}, for sortable types only.
 * This specifically excludes spatial data types, which are not themselves sortable.
 */
public static ElementType toSortableElementType(DataType dataType) {
    // Spatial and grid types are not sortable, so report them as UNKNOWN instead of mapping them.
    return DataType.isSpatialOrGrid(dataType) ? ElementType.UNKNOWN : toElementType(dataType);
}
/**
 * Map QL's {@link DataType} to the compute engine's {@link ElementType}.
 * Equivalent to calling {@link #toElementType(DataType, MappedFieldType.FieldExtractPreference)}
 * with no extraction preference ({@code NONE}).
 */
public static ElementType toElementType(DataType dataType) {
    return toElementType(dataType, NONE);
}
/**
 * Map QL's {@link DataType} to the compute engine's {@link ElementType}.
 * Under some situations, the same data type might be extracted into a different element type.
 * For example, spatial types can be extracted into doc-values under specific conditions, otherwise they extract as BytesRef.
 *
 * @param dataType               the QL data type to map
 * @param fieldExtractPreference how the field is being extracted; affects the element type
 *                               chosen for spatial types
 * @throws EsqlIllegalArgumentException for data types that have no compute-engine representation
 */
public static ElementType toElementType(DataType dataType, MappedFieldType.FieldExtractPreference fieldExtractPreference) {
    return switch (dataType) {
        // All of these are represented as 64-bit longs in the compute engine.
        case LONG, DATETIME, DATE_NANOS, UNSIGNED_LONG, COUNTER_LONG, GEOHASH, GEOTILE, GEOHEX -> ElementType.LONG;
        case INTEGER, COUNTER_INTEGER -> ElementType.INT;
        case DOUBLE, COUNTER_DOUBLE -> ElementType.DOUBLE;
        // unsupported fields are passed through as a BytesRef
        case KEYWORD, TEXT, IP, SOURCE, VERSION, UNSUPPORTED -> ElementType.BYTES_REF;
        case NULL -> ElementType.NULL;
        case BOOLEAN -> ElementType.BOOLEAN;
        case DOC_DATA_TYPE -> ElementType.DOC;
        case TSID_DATA_TYPE -> ElementType.BYTES_REF;
        // Points extract as doc-value longs when doc-values are preferred, otherwise as BytesRef.
        case GEO_POINT, CARTESIAN_POINT -> fieldExtractPreference == DOC_VALUES ? ElementType.LONG : ElementType.BYTES_REF;
        // Shapes extract as ints when only their spatial bounds are needed, otherwise as BytesRef.
        case GEO_SHAPE, CARTESIAN_SHAPE -> fieldExtractPreference == EXTRACT_SPATIAL_BOUNDS ? ElementType.INT : ElementType.BYTES_REF;
        case PARTIAL_AGG -> ElementType.COMPOSITE;
        case AGGREGATE_METRIC_DOUBLE -> ElementType.AGGREGATE_METRIC_DOUBLE;
        case EXPONENTIAL_HISTOGRAM -> ElementType.EXPONENTIAL_HISTOGRAM;
        case DENSE_VECTOR -> ElementType.FLOAT;
        // These types never reach the compute engine; mapping them is a programming error.
        case SHORT, BYTE, DATE_PERIOD, TIME_DURATION, OBJECT, FLOAT, HALF_FLOAT, SCALED_FLOAT -> throw EsqlIllegalArgumentException
            .illegalDataType(dataType);
    };
}
/**
 * A non-breaking block factory used to create small pages during the planning.
 * Backed by a {@link NoopCircuitBreaker} and non-recycling {@link BigArrays}, so allocations
 * made through it are not accounted against any circuit breaker.
 * TODO: Remove this
 */
@Deprecated(forRemoval = true)
public static final BlockFactory NON_BREAKING_BLOCK_FACTORY = BlockFactory.getInstance(
    new NoopCircuitBreaker("noop-esql-breaker"),
    BigArrays.NON_RECYCLING_INSTANCE
);
/**
 * Returns {@code true} when the plan's output contains the {@code _score} metadata attribute,
 * i.e. the query makes use of relevance scoring.
 */
public static boolean usesScoring(QueryPlan<?> plan) {
    for (var attr : plan.output()) {
        if (attr instanceof MetadataAttribute ma && ma.name().equals(MetadataAttribute.SCORE)) {
            return true;
        }
    }
    return false;
}
}
| SimplePlanReduction |
java | quarkusio__quarkus | extensions/tls-registry/deployment/src/test/java/io/quarkus/tls/P12KeyStoreWithOverriddenCredentialsProviderTest.java | {
"start": 1004,
"end": 2273
} | class ____ {
private static final String configuration = """
quarkus.tls.key-store.p12.path=target/certs/test-credentials-provider-keystore.p12
quarkus.tls.key-store.p12.password=secret123!
quarkus.tls.key-store.credentials-provider.name=tls
""";
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer(
() -> ShrinkWrap.create(JavaArchive.class)
.addClass(MyCredentialProvider.class)
.add(new StringAsset(configuration), "application.properties"));
@Inject
TlsConfigurationRegistry certificates;
@Test
void test() throws KeyStoreException, CertificateParsingException {
TlsConfiguration def = certificates.getDefault().orElseThrow();
X509Certificate certificate = (X509Certificate) def.getKeyStore().getCertificate("test-credentials-provider");
assertThat(certificate).isNotNull();
assertThat(certificate.getSubjectAlternativeNames()).anySatisfy(l -> {
assertThat(l.get(0)).isEqualTo(2);
assertThat(l.get(1)).isEqualTo("localhost");
});
}
@ApplicationScoped
public static | P12KeyStoreWithOverriddenCredentialsProviderTest |
java | spring-projects__spring-framework | spring-tx/src/test/java/org/springframework/transaction/annotation/EnableTransactionManagementTests.java | {
"start": 18914,
"end": 19291
} | class ____ implements ConfigurationCondition {
@Override
public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) {
return false;
}
@Override
public ConfigurationPhase getConfigurationPhase() {
return ConfigurationPhase.REGISTER_BEAN;
}
}
@Configuration
@EnableTransactionManagement(mode = AdviceMode.ASPECTJ)
static | NeverCondition |
java | google__guava | android/guava/src/com/google/common/cache/AbstractLoadingCache.java | {
"start": 1225,
"end": 1853
} | class ____ provide an
* implementation for the {@link #get(Object)} and {@link #getIfPresent} methods. {@link
* #getUnchecked}, {@link #get(Object, Callable)}, and {@link #getAll} are implemented in terms of
* {@code get}; {@link #getAllPresent} is implemented in terms of {@code getIfPresent}; {@link
* #putAll} is implemented in terms of {@link #put}, {@link #invalidateAll(Iterable)} is implemented
* in terms of {@link #invalidate}. The method {@link #cleanUp} is a no-op. All other methods throw
* an {@link UnsupportedOperationException}.
*
* @author Charles Fry
* @since 11.0
*/
@GwtIncompatible
public abstract | and |
java | apache__camel | components/camel-jpa/src/main/java/org/apache/camel/component/jpa/JpaConsumer.java | {
"start": 2722,
"end": 20376
} | class ____ {
private Exchange exchange;
private Object result;
private EntityManager manager;
private DataHolder() {
}
}
public JpaConsumer(JpaEndpoint endpoint, Processor processor) {
super(endpoint, processor);
this.entityManagerFactory = endpoint.getEntityManagerFactory();
this.transactionStrategy = endpoint.getTransactionStrategy();
}
private void recreateEntityManagerIfNeeded() {
// Recreate EntityManager in case it is disposed due to transaction rollback
if (entityManager == null) {
if (getEndpoint().isSharedEntityManager()) {
this.entityManager = SharedEntityManagerCreator.createSharedEntityManager(entityManagerFactory);
} else {
this.entityManager = entityManagerFactory.createEntityManager();
}
LOG.trace("Recreated EntityManager {} on {}", entityManager, this);
}
}
@Override
protected int poll() throws Exception {
// must reset for each poll
shutdownRunningTask = null;
pendingExchanges = 0;
recreateEntityManagerIfNeeded();
final int[] messagePolled = { 0 };
try {
transactionStrategy.executeInTransaction(() -> {
if (getEndpoint().isJoinTransaction()) {
entityManager.joinTransaction();
}
Queue<DataHolder> answer = new LinkedList<>();
Query toExecute = getQueryFactory().createQuery(entityManager);
configureParameters(toExecute);
LOG.trace("Created query {}", toExecute);
List<?> results = toExecute.getResultList();
LOG.trace("Got result list from query {}", results);
// okay we have some response from jpa so lets mark the consumer as ready
forceConsumerAsReady();
for (Object result : results) {
DataHolder holder = new DataHolder();
holder.manager = entityManager;
holder.result = result;
holder.exchange = createExchange(result, entityManager);
answer.add(holder);
}
PersistenceException cause = null;
try {
messagePolled[0] = processBatch(CastUtils.cast(answer));
} catch (PersistenceException e) {
cause = e;
} catch (Exception e) {
cause = new PersistenceException(e);
}
if (cause != null) {
if (!isTransacted()) {
LOG.warn(
"Error processing last message due: {}. Will commit all previous successful processed message, and ignore this last failure.",
cause.getMessage(), cause);
} else {
// rollback all by throwing exception
throw cause;
}
}
// commit
LOG.debug("Flushing EntityManager");
entityManager.flush();
// must clear after flush
entityManager.clear();
});
} catch (Exception e) {
// Potentially EntityManager could be in an inconsistent state after transaction rollback,
// so disposing it to have it recreated in next poll. cf. Java Persistence API 3.3.2 Transaction Rollback
LOG.debug("Disposing EntityManager {} on {} due to coming transaction rollback", entityManager, this);
entityManager.close();
entityManager = null;
throw new PersistenceException(e);
}
return getEndpoint().getCamelContext().getTypeConverter().convertTo(int.class, messagePolled[0]);
}
@Override
public int processBatch(Queue<Object> exchanges) throws Exception {
int total = exchanges.size();
// limit if needed
if (maxMessagesPerPoll > 0 && total > maxMessagesPerPoll) {
LOG.debug("Limiting to maximum messages to poll {} as there were {} messages in this poll.",
maxMessagesPerPoll, total);
total = maxMessagesPerPoll;
}
for (int index = 0; index < total && isBatchAllowed(); index++) {
// only loop if we are started (allowed to run)
DataHolder holder = org.apache.camel.util.ObjectHelper.cast(DataHolder.class, exchanges.poll());
EntityManager batchEntityManager = holder.manager;
Exchange exchange = holder.exchange;
Object result = holder.result;
// add current index and total as properties
exchange.setProperty(ExchangePropertyKey.BATCH_INDEX, index);
exchange.setProperty(ExchangePropertyKey.BATCH_SIZE, total);
exchange.setProperty(ExchangePropertyKey.BATCH_COMPLETE, index == total - 1);
// update pending number of exchanges
pendingExchanges = total - index - 1;
if (lockEntity(result, batchEntityManager)) {
// Run the @PreConsumed callback
createPreDeleteHandler().deleteObject(batchEntityManager, result, exchange);
// process the current exchange
LOG.debug("Processing exchange: {}", exchange);
try {
getProcessor().process(exchange);
} catch (Exception e) {
exchange.setException(e);
}
try {
if (exchange.getException() != null) {
// if we failed then throw exception
throw exchange.getException();
} else {
// Run the @Consumed callback
getDeleteHandler().deleteObject(batchEntityManager, result, exchange);
}
} finally {
releaseExchange(exchange, false);
}
}
}
return total;
}
// Properties
// -------------------------------------------------------------------------
@Override
public JpaEndpoint getEndpoint() {
return (JpaEndpoint) super.getEndpoint();
}
public QueryFactory getQueryFactory() {
if (queryFactory == null) {
queryFactory = createQueryFactory();
if (queryFactory == null) {
throw new IllegalArgumentException(
"No queryType property configured on this consumer, nor an entityType configured on the endpoint so cannot consume");
}
}
return queryFactory;
}
public void setQueryFactory(QueryFactory queryFactory) {
this.queryFactory = queryFactory;
}
public DeleteHandler<Object> getDeleteHandler() {
if (deleteHandler == null) {
deleteHandler = createDeleteHandler();
}
return deleteHandler;
}
public void setDeleteHandler(DeleteHandler<Object> deleteHandler) {
this.deleteHandler = deleteHandler;
}
public DeleteHandler<Object> getPreDeleteHandler() {
if (preDeleteHandler == null) {
preDeleteHandler = createPreDeleteHandler();
}
return preDeleteHandler;
}
public void setPreDeleteHandler(DeleteHandler<Object> preDeleteHandler) {
this.preDeleteHandler = preDeleteHandler;
}
public void setParameters(Map<String, Object> params) {
this.parameters = params;
}
public Map<String, Object> getParameters() {
return parameters;
}
public String getNamedQuery() {
return namedQuery;
}
public void setNamedQuery(String namedQuery) {
this.namedQuery = namedQuery;
}
public LockModeType getLockModeType() {
return lockModeType;
}
public void setLockModeType(LockModeType lockModeType) {
this.lockModeType = lockModeType;
}
public String getNativeQuery() {
return nativeQuery;
}
public void setNativeQuery(String nativeQuery) {
this.nativeQuery = nativeQuery;
}
public String getQuery() {
return query;
}
public void setQuery(String query) {
this.query = query;
}
public Class<?> getResultClass() {
return resultClass;
}
public void setResultClass(Class<?> resultClass) {
this.resultClass = resultClass;
}
public boolean isTransacted() {
return transacted;
}
/**
* Sets whether to run in transacted mode or not.
* <p/>
* This option is default <tt>false</tt>. When <tt>false</tt> then all the good messages will commit, and the first
* failed message will rollback. However when <tt>true</tt>, then all messages will rollback, if just one message
* failed.
*/
public void setTransacted(boolean transacted) {
this.transacted = transacted;
}
/**
* Sets whether to use NOWAIT on lock and silently skip the entity. This allows different instances to process
* entities at the same time but not processing the same entity.
*/
public void setSkipLockedEntity(boolean skipLockedEntity) {
this.skipLockedEntity = skipLockedEntity;
}
public boolean isSkipLockedEntity() {
return skipLockedEntity;
}
// Implementation methods
// -------------------------------------------------------------------------
/**
* A strategy method to lock an object with an exclusive lock so that it can be processed
*
* @param entity the entity to be locked
* @param entityManager entity manager
* @return true if the entity was locked
*/
protected boolean lockEntity(Object entity, EntityManager entityManager) {
if (!getEndpoint().isConsumeLockEntity()) {
return true;
}
try {
LOG.debug("Acquiring exclusive lock on entity: {}", entity);
if (isSkipLockedEntity()) {
entityManager.lock(entity, lockModeType, NOWAIT);
} else {
entityManager.lock(entity, lockModeType);
}
return true;
} catch (Exception e) {
LOG.debug("Failed to achieve lock on entity: {}. Reason: {}", entity, e.getMessage(), e);
if (e instanceof PessimisticLockException || e instanceof OptimisticLockException) {
//transaction marked as rollback can't continue gracefully
throw (PersistenceException) e;
}
//TODO: Find if possible an alternative way to handle results of native queries.
//Result of native queries are Arrays and cannot be locked by all JPA Providers.
return entity.getClass().isArray();
}
}
protected QueryFactory createQueryFactory() {
if (query != null) {
return QueryBuilder.query(query);
} else if (namedQuery != null) {
return QueryBuilder.namedQuery(namedQuery);
} else if (nativeQuery != null) {
if (resultClass != null) {
return QueryBuilder.nativeQuery(nativeQuery, resultClass);
} else {
return QueryBuilder.nativeQuery(nativeQuery);
}
} else {
Class<?> entityType = getEndpoint().getEntityType();
if (entityType == null) {
return null;
} else {
// Check if we have a property name on the @Entity annotation
String name = getEntityName(entityType);
if (name != null) {
return QueryBuilder.query("select x from " + name + " x");
} else {
// Remove package name of the entity to be conform with JPA 1.0 spec
return QueryBuilder.query("select x from " + entityType.getSimpleName() + " x");
}
}
}
}
protected String getEntityName(Class<?> clazz) {
Entity entity = clazz.getAnnotation(Entity.class);
// Check if the property name has been defined for Entity annotation
if (entity != null && !entity.name().isEmpty()) {
return entity.name();
} else {
return null;
}
}
protected DeleteHandler<Object> createPreDeleteHandler() {
// Look for @PreConsumed to allow custom callback before the Entity has been consumed
final Class<?> entityType = getEndpoint().getEntityType();
if (entityType != null) {
// Inspect the method(s) annotated with @PreConsumed
List<Method> methods = org.apache.camel.util.ObjectHelper.findMethodsWithAnnotation(entityType, PreConsumed.class);
if (methods.size() > 1) {
throw new IllegalStateException(
"Only one method can be annotated with the @PreConsumed annotation but found: " + methods);
} else if (methods.size() == 1) {
// Inspect the parameters of the @PreConsumed method
final Method method = methods.get(0);
final boolean useExchangeParameter = checkParameters(method);
return (EntityManager em, Object entityBean, Exchange exchange) -> {
// The entityBean could be an Object array
if (entityType.isInstance(entityBean)) {
if (useExchangeParameter) {
ObjectHelper.invokeMethod(method, entityBean, exchange);
} else {
ObjectHelper.invokeMethod(method, entityBean);
}
}
};
}
}
// else do nothing
return (EntityManager em, Object entityBean, Exchange exchange) -> {
};
}
protected DeleteHandler<Object> createDeleteHandler() {
// look for @Consumed to allow custom callback when the Entity has been consumed
final Class<?> entityType = getEndpoint().getEntityType();
if (entityType != null) {
List<Method> methods = org.apache.camel.util.ObjectHelper.findMethodsWithAnnotation(entityType, Consumed.class);
if (methods.size() > 1) {
throw new IllegalArgumentException(
"Only one method can be annotated with the @Consumed annotation but found: " + methods);
} else if (methods.size() == 1) {
final Method method = methods.get(0);
final boolean useExchangeParameter = checkParameters(method);
return (EntityManager em, Object entityBean, Exchange exchange) -> {
if (entityType.isInstance(entityBean)) {
if (useExchangeParameter) {
ObjectHelper.invokeMethod(method, entityBean, exchange);
} else {
ObjectHelper.invokeMethod(method, entityBean);
}
}
};
}
}
if (getEndpoint().isConsumeDelete()) {
return (EntityManager em, Object entityBean, Exchange exchange) -> em.remove(entityBean);
}
return (EntityManager em, Object entityBean, Exchange exchange) -> {
};
}
protected boolean checkParameters(Method method) {
boolean result = false;
Class<?>[] receivedParameters = method.getParameterTypes();
if (receivedParameters.length == 1 && receivedParameters[0].isAssignableFrom(Exchange.class)) {
result = true;
}
if (receivedParameters.length > 0 && !result) {
throw new IllegalStateException("@PreConsumed annotated method cannot have parameter other than Exchange");
}
return result;
}
protected void configureParameters(Query query) {
int maxResults = getEndpoint().getMaximumResults();
if (maxResults > 0) {
query.setMaxResults(maxResults);
}
// setup the parameter
if (parameters != null) {
for (Entry<String, Object> entry : parameters.entrySet()) {
query.setParameter(entry.getKey(), entry.getValue());
}
}
}
protected Exchange createExchange(Object result, EntityManager entityManager) {
Exchange exchange = createExchange(false);
exchange.getIn().setBody(result);
exchange.getIn().setHeader(JpaConstants.ENTITY_MANAGER, entityManager);
return exchange;
}
@Override
protected void doInit() throws Exception {
super.doInit();
// need to setup entity manager first
if (getEndpoint().isSharedEntityManager()) {
this.entityManager = SharedEntityManagerCreator.createSharedEntityManager(entityManagerFactory);
} else {
this.entityManager = entityManagerFactory.createEntityManager();
}
LOG.trace("Created EntityManager {} on {}", entityManager, this);
}
@Override
protected void doStop() throws Exception {
// noop
}
@Override
protected void doShutdown() throws Exception {
if (entityManager != null) {
this.entityManager.close();
LOG.trace("Closed EntityManager {} on {}", entityManager, this);
}
super.doShutdown();
}
}
| DataHolder |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/JoinedInheritanceTreatedJoinNullnessTest.java | {
"start": 4949,
"end": 5296
} | class ____ {
@Id
@GeneratedValue
private Long id;
@Column
private String displayName;
public AbstractCompany() {
}
public AbstractCompany(String displayName) {
this.displayName = displayName;
}
}
@Entity( name = "AbstractDcCompany" )
@Inheritance( strategy = InheritanceType.JOINED )
public abstract static | AbstractCompany |
java | apache__kafka | streams/integration-tests/src/test/java/org/apache/kafka/streams/integration/VersionedKeyValueStoreIntegrationTest.java | {
"start": 27979,
"end": 30814
} | class ____ implements Processor<Integer, String, Integer, Integer> {
private ProcessorContext<Integer, Integer> context;
private TimestampedKeyValueStore<Integer, String> store;
// in-memory copy of seen data, to validate for testing purposes.
private final Map<Integer, Optional<ValueAndTimestamp<String>>> data;
TimestampedStoreContentCheckerProcessor() {
this.data = new HashMap<>();
}
@Override
public void init(final ProcessorContext<Integer, Integer> context) {
this.context = context;
store = context.getStateStore(STORE_NAME);
}
@Override
public void process(final Record<Integer, String> record) {
// add record to store
if (DataTracker.DELETE_VALUE_KEYWORD.equals(record.value())) {
// special value "delete" is interpreted as a delete() call from
// VersionedStoreContentCheckerProcessor but we do not support it here
throw new IllegalArgumentException("Using 'delete' keyword for "
+ "TimestampedStoreContentCheckerProcessor will result in the record "
+ "timestamp being ignored. Use regular put with null value instead.");
}
final ValueAndTimestamp<String> valueAndTimestamp = ValueAndTimestamp.make(record.value(), record.timestamp());
store.put(record.key(), valueAndTimestamp);
data.put(record.key(), Optional.ofNullable(valueAndTimestamp));
// check expected contents of store, and signal completion by writing
// number of failures to downstream
final int failedChecks = checkStoreContents();
context.forward(record.withValue(failedChecks));
}
/**
* @return number of failed checks
*/
private int checkStoreContents() {
int failedChecks = 0;
for (final Map.Entry<Integer, Optional<ValueAndTimestamp<String>>> keyWithValueAndTimestamp : data.entrySet()) {
final Integer key = keyWithValueAndTimestamp.getKey();
final ValueAndTimestamp<String> valueAndTimestamp = keyWithValueAndTimestamp.getValue().orElse(null);
// validate get from store
final ValueAndTimestamp<String> record = store.get(key);
if (!Objects.equals(record, valueAndTimestamp)) {
failedChecks++;
}
}
return failedChecks;
}
}
/**
* Same as {@link VersionedStoreContentCheckerProcessor} but for regular key-value stores instead,
* for use in validating the manual upgrade path from non-versioned to versioned stores.
*/
private static | TimestampedStoreContentCheckerProcessor |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/OracleTypes.java | {
"start": 180,
"end": 508
} | class ____ {
public static final int CURSOR = -10;
public static final int JSON = 2016;
public static final int VECTOR = -105;
public static final int VECTOR_INT8 = -106;
public static final int VECTOR_FLOAT32 = -107;
public static final int VECTOR_FLOAT64 = -108;
public static final int VECTOR_BINARY = -109;
}
| OracleTypes |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inlineme/ValidatorTest.java | {
"start": 20252,
"end": 21121
} | class ____ {
@InlineMe(
replacement = "this.setDeadline(Client.Builder.parseDuration(deadline))",
imports = {"com.google.frobber.Client"})
@Deprecated
public void setDeadline(String deadline) {
setDeadline(Client.Builder.parseDuration(deadline));
}
public void setDeadline(Duration deadline) {}
public static Duration parseDuration(String string) {
return Duration.parse(string);
}
}
}
""")
.doTest();
}
@Test
public void assignmentToPrivateField() {
helper
.addSourceLines(
"RpcClient.java",
"""
import com.google.errorprone.annotations.InlineMe;
public final | Builder |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/mapper/AbstractPointGeometryFieldMapper.java | {
"start": 1081,
"end": 2131
} | class ____<T> extends AbstractGeometryFieldMapper<T> {
public static <T> Parameter<T> nullValueParam(
Function<FieldMapper, T> initializer,
TriFunction<String, MappingParserContext, Object, T> parser,
Supplier<T> def,
Serializer<T> serializer
) {
return new Parameter<T>("null_value", false, def, parser, initializer, serializer, Objects::toString);
}
protected final T nullValue;
protected AbstractPointGeometryFieldMapper(
String simpleName,
MappedFieldType mappedFieldType,
BuilderParams builderParams,
Explicit<Boolean> ignoreMalformed,
Explicit<Boolean> ignoreZValue,
T nullValue,
Parser<T> parser
) {
super(simpleName, mappedFieldType, builderParams, ignoreMalformed, ignoreZValue, parser);
this.nullValue = nullValue;
}
public T getNullValue() {
return nullValue;
}
/** A base parser implementation for point formats */
protected abstract static | AbstractPointGeometryFieldMapper |
java | spring-projects__spring-security | web/src/main/java/org/springframework/security/web/savedrequest/Enumerator.java | {
"start": 1097,
"end": 1214
} | class ____ based on code in Apache Tomcat.
* </p>
*
* @author Craig McClanahan
* @author Andrey Grebnev
*/
public | is |
java | quarkusio__quarkus | extensions/qute/deployment/src/main/java/io/quarkus/qute/deployment/QuteProcessor.java | {
"start": 60834,
"end": 70852
} | enum ____) are ignored
Predicate<AnnotationTarget> filter = QuteProcessor::defaultFilter;
Predicate<AnnotationTarget> enumConstantFilter = QuteProcessor::enumConstantFilter;
filter = filter.and(enumConstantFilter.or(not(QuteProcessor::staticsFilter)));
return filter;
}
private CheckedTemplateBuildItem findCheckedTemplate(QuteConfig config, TemplateAnalysis analysis,
List<CheckedTemplateBuildItem> checkedTemplates) {
// Try to find the checked template
String path = analysis.path;
for (String suffix : config.suffixes()) {
if (path.endsWith(suffix)) {
path = path.substring(0, path.length() - (suffix.length() + 1));
break;
}
}
for (CheckedTemplateBuildItem item : checkedTemplates) {
if (item.isFragment()) {
continue;
}
if (item.templateId.equals(path)) {
return item;
}
}
return null;
}
static String buildIgnorePattern(Iterable<String> names) {
// ^(?!\\Qbar\\P|\\Qfoo\\P).*$
StringBuilder pattern = new StringBuilder("^(?!");
Iterator<String> it = names.iterator();
if (!it.hasNext()) {
throw new IllegalArgumentException();
}
while (it.hasNext()) {
String name = (String) it.next();
pattern.append(Pattern.quote(name));
if (it.hasNext()) {
pattern.append("|");
}
}
pattern.append(").*$");
return pattern.toString();
}
@SuppressForbidden(reason = "Type#toString() is what we want to use here")
static MatchResult validateNestedExpressions(QuteConfig config, TemplateAnalysis templateAnalysis, ClassInfo rootClazz,
Map<String, MatchResult> results,
Iterable<Predicate<TypeCheck>> excludes, BuildProducer<IncorrectExpressionBuildItem> incorrectExpressions,
Expression expression, IndexView index,
Map<DotName, Set<String>> implicitClassToMembersUsed, Function<String, String> templateIdToPathFun,
Map<Integer, MatchResult> generatedIdsToMatches, Iterable<Predicate<TypeCheck>> extensionMethodExcludes,
CheckedTemplateBuildItem checkedTemplate, JavaMemberLookupConfig lookupConfig, Map<String, BeanInfo> namedBeans,
Map<String, TemplateDataBuildItem> namespaceTemplateData,
List<TemplateExtensionMethodBuildItem> regularExtensionMethods,
Map<String, List<TemplateExtensionMethodBuildItem>> namespaceToExtensionMethods,
AssignabilityCheck assignabilityCheck,
List<TemplateGlobalBuildItem> globals) {
LOGGER.debugf("Validate %s from %s", expression, expression.getOrigin());
// ==============================================
// Validate parameters of nested virtual methods
// ==============================================
validateParametersOfNestedVirtualMethods(config, templateAnalysis, results, excludes, incorrectExpressions, expression,
index, implicitClassToMembersUsed, templateIdToPathFun, generatedIdsToMatches, extensionMethodExcludes,
checkedTemplate, lookupConfig, namedBeans, namespaceTemplateData, regularExtensionMethods,
namespaceToExtensionMethods, assignabilityCheck, globals);
MatchResult match = new MatchResult(assignabilityCheck);
// ======================
// Process the namespace
// ======================
NamespaceResult namespaceResult = processNamespace(expression, match, index, incorrectExpressions, namedBeans, results,
templateAnalysis, namespaceTemplateData, lookupConfig, namespaceToExtensionMethods, templateIdToPathFun,
globals);
if (namespaceResult.ignoring) {
return match;
}
if (namespaceResult.hasRootClazz()) {
rootClazz = namespaceResult.rootClazz;
}
if (namespaceResult.hasLookupConfig()) {
lookupConfig = namespaceResult.lookupConfig;
}
// =====================================
// Validate checked template expression
// =====================================
if (isInvalidCheckedTemplateExpression(config, checkedTemplate, expression, match, results,
namespaceResult.dataNamespaceExpTypeInfo,
incorrectExpressions)) {
return match;
}
// ==========================================
// Skip validation if no type info available
// ==========================================
if (rootClazz == null && !expression.hasTypeInfo() && !namespaceResult.hasDataNamespaceInfo()) {
return putResult(match, results, expression);
}
// Parse the type info
List<Info> parts = TypeInfos.create(expression, index, templateIdToPathFun);
Iterator<Info> iterator = parts.iterator();
Info root = iterator.next();
// ======================
// Process the root part
// ======================
RootResult rootResult = processRoot(expression, match, root, iterator, templateAnalysis, index, incorrectExpressions,
rootClazz, parts, results, generatedIdsToMatches, templateIdToPathFun, assignabilityCheck, namespaceResult);
if (rootResult.ignoring) {
return match;
}
// Reset the iterator if necessary
iterator = rootResult.iterator;
// Iterate over all parts of the expression and check each part against the current match type
while (iterator.hasNext()) {
Info info = iterator.next();
if (!match.isEmpty()) {
// Arrays are handled specifically
// We use the built-in resolver at runtime because the extension methods cannot be used to cover all combinations of dimensions and component types
if (match.isArray() && processArray(info, match)) {
continue;
}
AnnotationTarget member = null;
TemplateExtensionMethodBuildItem extensionMethod = null;
Type type = null;
if (!match.isPrimitive()) {
// Try to find a java member
Set<String> membersUsed = implicitClassToMembersUsed.get(match.type().name());
if (membersUsed == null) {
membersUsed = new HashSet<>();
implicitClassToMembersUsed.put(match.type().name(), membersUsed);
}
if (match.clazz() != null) {
if (info.isVirtualMethod()) {
member = findMethod(info.part.asVirtualMethod(), match.clazz(), expression, index,
templateIdToPathFun, results, lookupConfig, assignabilityCheck);
if (member != null) {
membersUsed.add(member.asMethod().name());
}
} else if (info.isProperty()) {
member = findProperty(info.asProperty().name, match.clazz(), lookupConfig);
if (member != null) {
membersUsed
.add(member.kind() == Kind.FIELD ? member.asField().name() : member.asMethod().name());
}
}
}
}
if (member == null) {
// Try to find an extension method
extensionMethod = findTemplateExtensionMethod(info, match.type(), regularExtensionMethods, expression,
index, templateIdToPathFun, results, assignabilityCheck);
if (extensionMethod != null) {
type = resolveType(extensionMethod.getMethod(), match, index, extensionMethod, results, info);
// Test whether the validation of extension method should be skipped
if (skipValidation(extensionMethodExcludes, expression, match, info, type)) {
break;
}
member = extensionMethod.getMethod();
}
}
// Test whether the validation should be skipped
if (member == null && skipValidation(excludes, expression, match, info, match.type())) {
break;
}
if (member == null) {
// No member found - incorrect expression
incorrectExpressions.produce(new IncorrectExpressionBuildItem(expression.toOriginalString(),
info.value, match.type().toString(), expression.getOrigin()));
match.clearValues();
break;
} else {
if (type == null) {
type = resolveType(member, match, index, extensionMethod, results, info);
}
ClassInfo clazz = null;
if (type.kind() == Type.Kind.CLASS || type.kind() == Type.Kind.PARAMETERIZED_TYPE) {
clazz = index.getClassByName(type.name());
}
match.setValues(clazz, type);
if (info.hasHints()) {
// For example a loop section needs to validate the type of an element
processHints(templateAnalysis, info.asHintInfo().hints, match, index, expression, generatedIdsToMatches,
incorrectExpressions);
}
}
} else {
LOGGER.debugf(
"No match | constants |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MergeManagerImpl.java | {
"start": 4326,
"end": 14693
} | class ____ run during in-memory merge, if defined.
*/
private final Class<? extends Reducer> combinerClass;
/**
* Resettable collector used for combine.
*/
private final CombineOutputCollector<K,V> combineCollector;
private final Counters.Counter spilledRecordsCounter;
private final Counters.Counter reduceCombineInputCounter;
private final Counters.Counter mergedMapOutputsCounter;
private final CompressionCodec codec;
private final Progress mergePhase;
public MergeManagerImpl(TaskAttemptID reduceId, JobConf jobConf,
FileSystem localFS,
LocalDirAllocator localDirAllocator,
Reporter reporter,
CompressionCodec codec,
Class<? extends Reducer> combinerClass,
CombineOutputCollector<K,V> combineCollector,
Counters.Counter spilledRecordsCounter,
Counters.Counter reduceCombineInputCounter,
Counters.Counter mergedMapOutputsCounter,
ExceptionReporter exceptionReporter,
Progress mergePhase, MapOutputFile mapOutputFile) {
this.reduceId = reduceId;
this.jobConf = jobConf;
this.localDirAllocator = localDirAllocator;
this.exceptionReporter = exceptionReporter;
this.reporter = reporter;
this.codec = codec;
this.combinerClass = combinerClass;
this.combineCollector = combineCollector;
this.reduceCombineInputCounter = reduceCombineInputCounter;
this.spilledRecordsCounter = spilledRecordsCounter;
this.mergedMapOutputsCounter = mergedMapOutputsCounter;
this.mapOutputFile = mapOutputFile;
this.mapOutputFile.setConf(jobConf);
this.localFS = localFS;
this.rfs = ((LocalFileSystem)localFS).getRaw();
final float maxInMemCopyUse =
jobConf.getFloat(MRJobConfig.SHUFFLE_INPUT_BUFFER_PERCENT,
MRJobConfig.DEFAULT_SHUFFLE_INPUT_BUFFER_PERCENT);
if (maxInMemCopyUse > 1.0 || maxInMemCopyUse < 0.0) {
throw new IllegalArgumentException("Invalid value for " +
MRJobConfig.SHUFFLE_INPUT_BUFFER_PERCENT + ": " +
maxInMemCopyUse);
}
// Allow unit tests to fix Runtime memory
this.memoryLimit = (long)(jobConf.getLong(
MRJobConfig.REDUCE_MEMORY_TOTAL_BYTES,
Runtime.getRuntime().maxMemory()) * maxInMemCopyUse);
this.ioSortFactor = jobConf.getInt(MRJobConfig.IO_SORT_FACTOR,
MRJobConfig.DEFAULT_IO_SORT_FACTOR);
final float singleShuffleMemoryLimitPercent =
jobConf.getFloat(MRJobConfig.SHUFFLE_MEMORY_LIMIT_PERCENT,
DEFAULT_SHUFFLE_MEMORY_LIMIT_PERCENT);
if (singleShuffleMemoryLimitPercent < 0.0f
|| singleShuffleMemoryLimitPercent > 1.0f) {
throw new IllegalArgumentException("Invalid value for "
+ MRJobConfig.SHUFFLE_MEMORY_LIMIT_PERCENT + ": "
+ singleShuffleMemoryLimitPercent);
}
usedMemory = 0L;
commitMemory = 0L;
long maxSingleShuffleLimitConfiged =
(long)(memoryLimit * singleShuffleMemoryLimitPercent);
if(maxSingleShuffleLimitConfiged > Integer.MAX_VALUE) {
maxSingleShuffleLimitConfiged = Integer.MAX_VALUE;
LOG.info("The max number of bytes for a single in-memory shuffle cannot" +
" be larger than Integer.MAX_VALUE. Setting it to Integer.MAX_VALUE");
}
this.maxSingleShuffleLimit = maxSingleShuffleLimitConfiged;
this.memToMemMergeOutputsThreshold =
jobConf.getInt(MRJobConfig.REDUCE_MEMTOMEM_THRESHOLD, ioSortFactor);
this.mergeThreshold = (long)(this.memoryLimit *
jobConf.getFloat(
MRJobConfig.SHUFFLE_MERGE_PERCENT,
MRJobConfig.DEFAULT_SHUFFLE_MERGE_PERCENT));
LOG.info("MergerManager: memoryLimit=" + memoryLimit + ", " +
"maxSingleShuffleLimit=" + maxSingleShuffleLimit + ", " +
"mergeThreshold=" + mergeThreshold + ", " +
"ioSortFactor=" + ioSortFactor + ", " +
"memToMemMergeOutputsThreshold=" + memToMemMergeOutputsThreshold);
if (this.maxSingleShuffleLimit >= this.mergeThreshold) {
throw new RuntimeException("Invalid configuration: "
+ "maxSingleShuffleLimit should be less than mergeThreshold "
+ "maxSingleShuffleLimit: " + this.maxSingleShuffleLimit
+ "mergeThreshold: " + this.mergeThreshold);
}
boolean allowMemToMemMerge =
jobConf.getBoolean(MRJobConfig.REDUCE_MEMTOMEM_ENABLED, false);
if (allowMemToMemMerge) {
this.memToMemMerger =
new IntermediateMemoryToMemoryMerger(this,
memToMemMergeOutputsThreshold);
this.memToMemMerger.start();
} else {
this.memToMemMerger = null;
}
this.inMemoryMerger = createInMemoryMerger();
this.inMemoryMerger.start();
this.onDiskMerger = new OnDiskMerger(this);
this.onDiskMerger.start();
this.mergePhase = mergePhase;
}
protected MergeThread<InMemoryMapOutput<K,V>, K,V> createInMemoryMerger() {
return new InMemoryMerger(this);
}
protected MergeThread<CompressAwarePath,K,V> createOnDiskMerger() {
return new OnDiskMerger(this);
}
TaskAttemptID getReduceId() {
return reduceId;
}
@VisibleForTesting
ExceptionReporter getExceptionReporter() {
return exceptionReporter;
}
@Override
public void waitForResource() throws InterruptedException {
inMemoryMerger.waitForMerge();
}
@Override
public synchronized MapOutput<K,V> reserve(TaskAttemptID mapId,
long requestedSize,
int fetcher
) throws IOException {
if (requestedSize > maxSingleShuffleLimit) {
LOG.info(mapId + ": Shuffling to disk since " + requestedSize +
" is greater than maxSingleShuffleLimit (" +
maxSingleShuffleLimit + ")");
return new OnDiskMapOutput<K,V>(mapId, this, requestedSize, jobConf,
fetcher, true, FileSystem.getLocal(jobConf).getRaw(),
mapOutputFile.getInputFileForWrite(mapId.getTaskID(), requestedSize));
}
// Stall shuffle if we are above the memory limit
// It is possible that all threads could just be stalling and not make
// progress at all. This could happen when:
//
// requested size is causing the used memory to go above limit &&
// requested size < singleShuffleLimit &&
// current used size < mergeThreshold (merge will not get triggered)
//
// To avoid this from happening, we allow exactly one thread to go past
// the memory limit. We check (usedMemory > memoryLimit) and not
// (usedMemory + requestedSize > memoryLimit). When this thread is done
// fetching, this will automatically trigger a merge thereby unlocking
// all the stalled threads
if (usedMemory > memoryLimit) {
LOG.debug(mapId + ": Stalling shuffle since usedMemory (" + usedMemory
+ ") is greater than memoryLimit (" + memoryLimit + ")." +
" CommitMemory is (" + commitMemory + ")");
return null;
}
// Allow the in-memory shuffle to progress
LOG.debug(mapId + ": Proceeding with shuffle since usedMemory ("
+ usedMemory + ") is lesser than memoryLimit (" + memoryLimit + ")."
+ "CommitMemory is (" + commitMemory + ")");
return unconditionalReserve(mapId, requestedSize, true);
}
/**
* Unconditional Reserve is used by the Memory-to-Memory thread
* @return
*/
private synchronized InMemoryMapOutput<K, V> unconditionalReserve(
TaskAttemptID mapId, long requestedSize, boolean primaryMapOutput) {
usedMemory += requestedSize;
return new InMemoryMapOutput<K,V>(jobConf, mapId, this, (int)requestedSize,
codec, primaryMapOutput);
}
synchronized void unreserve(long size) {
usedMemory -= size;
}
public synchronized void closeInMemoryFile(InMemoryMapOutput<K,V> mapOutput) {
inMemoryMapOutputs.add(mapOutput);
LOG.info("closeInMemoryFile -> map-output of size: " + mapOutput.getSize()
+ ", inMemoryMapOutputs.size() -> " + inMemoryMapOutputs.size()
+ ", commitMemory -> " + commitMemory + ", usedMemory ->" + usedMemory);
commitMemory+= mapOutput.getSize();
// Can hang if mergeThreshold is really low.
if (commitMemory >= mergeThreshold) {
LOG.info("Starting inMemoryMerger's merge since commitMemory=" +
commitMemory + " > mergeThreshold=" + mergeThreshold +
". Current usedMemory=" + usedMemory);
inMemoryMapOutputs.addAll(inMemoryMergedMapOutputs);
inMemoryMergedMapOutputs.clear();
inMemoryMerger.startMerge(inMemoryMapOutputs);
commitMemory = 0L; // Reset commitMemory.
}
if (memToMemMerger != null) {
if (inMemoryMapOutputs.size() >= memToMemMergeOutputsThreshold) {
memToMemMerger.startMerge(inMemoryMapOutputs);
}
}
}
public synchronized void closeInMemoryMergedFile(InMemoryMapOutput<K,V> mapOutput) {
inMemoryMergedMapOutputs.add(mapOutput);
LOG.info("closeInMemoryMergedFile -> size: " + mapOutput.getSize() +
", inMemoryMergedMapOutputs.size() -> " +
inMemoryMergedMapOutputs.size());
}
public synchronized void closeOnDiskFile(CompressAwarePath file) {
onDiskMapOutputs.add(file);
if (onDiskMapOutputs.size() >= (2 * ioSortFactor - 1)) {
onDiskMerger.startMerge(onDiskMapOutputs);
}
}
@Override
public RawKeyValueIterator close() throws Throwable {
// Wait for on-going merges to complete
if (memToMemMerger != null) {
memToMemMerger.close();
}
inMemoryMerger.close();
onDiskMerger.close();
List<InMemoryMapOutput<K, V>> memory =
new ArrayList<InMemoryMapOutput<K, V>>(inMemoryMergedMapOutputs);
inMemoryMergedMapOutputs.clear();
memory.addAll(inMemoryMapOutputs);
inMemoryMapOutputs.clear();
List<CompressAwarePath> disk = new ArrayList<CompressAwarePath>(onDiskMapOutputs);
onDiskMapOutputs.clear();
return finalMerge(jobConf, rfs, memory, disk);
}
private | to |
java | google__dagger | java/dagger/testing/compile/CompilerTests.java | {
"start": 19069,
"end": 20981
} | class ____. */
public static Compiler compiler() {
return javac().withClasspath(ImmutableList.of(compilerDepsJar()));
}
public static void compileWithKapt(
List<Source> sources,
TemporaryFolder tempFolder,
Consumer<TestCompilationResult> onCompilationResult) {
compileWithKapt(sources, ImmutableMap.of(), tempFolder, onCompilationResult);
}
public static void compileWithKapt(
List<Source> sources,
Map<String, String> processorOptions,
TemporaryFolder tempFolder,
Consumer<TestCompilationResult> onCompilationResult) {
TestCompilationResult result =
TestKotlinCompilerKt.compile(
tempFolder.getRoot(),
new TestCompilationArguments(
sources,
/* classpath= */ ImmutableList.of(compilerDepsJar()),
/* inheritClasspath= */ false,
/* javacArguments= */ DEFAULT_JAVAC_OPTIONS,
/* kotlincArguments= */ DEFAULT_KOTLINC_OPTIONS,
/* kaptProcessors= */ ImmutableList.of(new ComponentProcessor()),
/* symbolProcessorProviders= */ ImmutableList.of(),
/* processorOptions= */ processorOptions));
onCompilationResult.accept(result);
}
private static File getRunfilesDir() {
return getRunfilesPath().toFile();
}
private static Path getRunfilesPath() {
Path propPath = getRunfilesPath(System.getProperties());
if (propPath != null) {
return propPath;
}
Path envPath = getRunfilesPath(System.getenv());
if (envPath != null) {
return envPath;
}
Path cwd = Paths.get("").toAbsolutePath();
return cwd.getParent();
}
private static Path getRunfilesPath(Map<?, ?> map) {
String runfilesPath = (String) map.get("TEST_SRCDIR");
return isNullOrEmpty(runfilesPath) ? null : Paths.get(runfilesPath);
}
private CompilerTests() {}
}
| path |
java | spring-projects__spring-security | docs/src/test/java/org/springframework/security/docs/features/integrations/rest/configurationwebclient/ServerWebClientHttpInterfaceIntegrationConfiguration.java | {
"start": 1845,
"end": 2650
} | class ____ {
// tag::config[]
@Bean
OAuth2WebClientHttpServiceGroupConfigurer securityConfigurer(
ReactiveOAuth2AuthorizedClientManager manager) {
return OAuth2WebClientHttpServiceGroupConfigurer.from(manager);
}
// end::config[]
@Bean
ReactiveOAuth2AuthorizedClientManager authorizedClientManager() {
return mock(ReactiveOAuth2AuthorizedClientManager.class);
}
@Bean
WebClientHttpServiceGroupConfigurer groupConfigurer(MockWebServer server) {
return groups -> {
String baseUrl = server.url("").toString();
groups
.forEachClient((group, builder) -> builder
.baseUrl(baseUrl)
.defaultHeader("Accept", "application/vnd.github.v3+json"));
};
}
@Bean
MockWebServer mockServer() {
return new MockWebServer();
}
}
| ServerWebClientHttpInterfaceIntegrationConfiguration |
java | apache__camel | core/camel-core-model/src/main/java/org/apache/camel/model/dataformat/UniVocityTsvDataFormat.java | {
"start": 1441,
"end": 2449
} | class ____ extends UniVocityAbstractDataFormat {
@XmlAttribute
@Metadata(label = "advanced", defaultValue = "\\")
private String escapeChar;
public UniVocityTsvDataFormat() {
super("univocityTsv");
}
protected UniVocityTsvDataFormat(UniVocityTsvDataFormat source) {
super(source);
this.escapeChar = source.escapeChar;
}
private UniVocityTsvDataFormat(Builder builder) {
super("univocityTsv", builder);
this.escapeChar = builder.escapeChar;
}
@Override
public UniVocityTsvDataFormat copyDefinition() {
return new UniVocityTsvDataFormat(this);
}
public String getEscapeChar() {
return escapeChar;
}
/**
* The escape character.
*/
public void setEscapeChar(String escapeChar) {
this.escapeChar = escapeChar;
}
/**
* {@code Builder} is a specific builder for {@link UniVocityTsvDataFormat}.
*/
@XmlTransient
public static | UniVocityTsvDataFormat |
java | apache__kafka | connect/runtime/src/main/java/org/apache/kafka/connect/runtime/ExactlyOnceWorkerSourceTask.java | {
"start": 17462,
"end": 23641
} | class ____ {
protected boolean shouldCommitTransactionForRecord(SourceRecord record) {
return false;
}
protected boolean shouldCommitTransactionForBatch(long currentTimeMs) {
return false;
}
protected boolean shouldCommitFinalTransaction() {
return false;
}
/**
* Hook to signal that a new transaction cycle has been started. May be invoked
* multiple times if the task is paused and then resumed. It can be assumed that
* a new transaction is created at least every time an existing transaction is
* committed; this is just a hook to notify that a new transaction may have been
* created outside of that flow as well.
*/
protected void initialize() {
}
public void maybeCommitTransactionForRecord(SourceRecord record) {
maybeCommitTransaction(shouldCommitTransactionForRecord(record));
}
public void maybeCommitTransactionForBatch() {
maybeCommitTransaction(shouldCommitTransactionForBatch(time.milliseconds()));
}
public void maybeCommitFinalTransaction() {
maybeCommitTransaction(shouldCommitFinalTransaction());
}
private void maybeCommitTransaction(boolean shouldCommit) {
if (shouldCommit) {
try (LoggingContext loggingContext = LoggingContext.forOffsets(id)) {
commitTransaction();
}
}
}
}
private TransactionBoundaryManager buildTransactionManager(
WorkerConfig workerConfig,
SourceConnectorConfig sourceConfig,
WorkerTransactionContext transactionContext) {
TransactionBoundary boundary = sourceConfig.transactionBoundary();
return switch (boundary) {
case POLL -> new TransactionBoundaryManager() {
@Override
protected boolean shouldCommitTransactionForBatch(long currentTimeMs) {
return true;
}
@Override
protected boolean shouldCommitFinalTransaction() {
return true;
}
};
case INTERVAL -> {
long transactionBoundaryInterval = Optional.ofNullable(sourceConfig.transactionBoundaryInterval())
.orElse(workerConfig.offsetCommitInterval());
yield new TransactionBoundaryManager() {
private final long commitInterval = transactionBoundaryInterval;
private long lastCommit;
@Override
public void initialize() {
this.lastCommit = time.milliseconds();
}
@Override
protected boolean shouldCommitTransactionForBatch(long currentTimeMs) {
if (time.milliseconds() >= lastCommit + commitInterval) {
lastCommit = time.milliseconds();
return true;
} else {
return false;
}
}
@Override
protected boolean shouldCommitFinalTransaction() {
return true;
}
};
}
case CONNECTOR -> {
Objects.requireNonNull(transactionContext, "Transaction context must be provided when using connector-defined transaction boundaries");
yield new TransactionBoundaryManager() {
@Override
protected boolean shouldCommitFinalTransaction() {
return shouldCommitTransactionForBatch(time.milliseconds());
}
@Override
protected boolean shouldCommitTransactionForBatch(long currentTimeMs) {
if (transactionContext.shouldAbortBatch()) {
log.info("Aborting transaction for batch as requested by connector");
maybeAbortTransaction();
// We abort the transaction, which causes all the records up to this point to be dropped, but we still want to
// commit offsets so that the task doesn't see the same records all over again
return true;
}
return transactionContext.shouldCommitBatch();
}
@Override
protected boolean shouldCommitTransactionForRecord(SourceRecord record) {
if (transactionContext.shouldAbortOn(record)) {
log.info("Aborting transaction for record on topic {} as requested by connector", record.topic());
log.trace("Last record in aborted transaction: {}", record);
maybeAbortTransaction();
// We abort the transaction, which causes all the records up to this point to be dropped, but we still want to
// commit offsets so that the task doesn't see the same records all over again
return true;
}
return transactionContext.shouldCommitOn(record);
}
private void maybeAbortTransaction() {
if (!transactionOpen) {
log.warn("Ignoring request by task to abort transaction as the current transaction is empty");
return;
}
producer.abortTransaction();
transactionMetrics.abortTransaction();
transactionOpen = false;
}
};
}
};
}
TransactionMetricsGroup transactionMetricsGroup() {
return transactionMetrics;
}
static | TransactionBoundaryManager |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/idclass/IdClassWithLazyManyToOneTest.java | {
"start": 1174,
"end": 6345
} | class ____ {
@BeforeEach
public void setUp(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
Subsystem subsystem = new Subsystem( "1", "Linux" );
SystemUser systemUser = new SystemUser( subsystem, "admin", "Andrea" );
session.persist( subsystem );
session.persist( systemUser );
}
);
}
@AfterEach
public void tearDown(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
@Test
public void testGet(SessionFactoryScope scope) {
SQLStatementInspector statementInspector = scope.getCollectingStatementInspector();
statementInspector.clear();
scope.inTransaction(
session -> {
PK pk = new PK( new Subsystem( "1", "Linux2" ), "admin" );
SystemUser systemUser = session.get( SystemUser.class, pk );
assertThat( systemUser.getName(), is( "Andrea" ) );
Subsystem subsystem = systemUser.getSubsystem();
statementInspector.assertExecutedCount( 1 );
statementInspector.assertNumberOfOccurrenceInQuery(
0,
"join",
0
);
statementInspector.clear();
assertFalse( Hibernate.isInitialized( subsystem ) );
assertThat( subsystem.getId(), is( "1" ) );
assertThat( subsystem.getDescription(), is( "Linux" ) );
assertThat( systemUser.getUsername(), is( "admin" ) );
assertTrue( Hibernate.isInitialized( subsystem ) );
statementInspector.assertExecutedCount( 1 );
statementInspector.assertNumberOfOccurrenceInQuery(
0,
"join",
0
);
}
);
}
@Test
public void testHql(SessionFactoryScope scope) {
SQLStatementInspector statementInspector = scope.getCollectingStatementInspector();
statementInspector.clear();
scope.inTransaction(
session -> {
PK pk = new PK( new Subsystem( "1", "Linux2" ), "admin" );
SystemUser systemUser = session.createQuery(
"from SystemUser s where s.id = :id",
SystemUser.class
).setParameter( "id", pk ).getSingleResult();
assertThat( systemUser.getName(), is( "Andrea" ) );
Subsystem subsystem = systemUser.getSubsystem();
assertFalse( Hibernate.isInitialized( subsystem ) );
statementInspector.assertExecutedCount( 1 );
statementInspector.assertNumberOfOccurrenceInQuery(
0,
"join",
0
);
statementInspector.clear();
assertThat( subsystem.getId(), is( "1" ) );
assertThat( subsystem.getDescription(), is( "Linux" ) );
assertThat( systemUser.getUsername(), is( "admin" ) );
assertTrue( Hibernate.isInitialized( subsystem ) );
statementInspector.assertExecutedCount( 1 );
statementInspector.assertNumberOfOccurrenceInQuery(
0,
"join",
0
);
}
);
statementInspector.clear();
scope.inTransaction(
session -> {
SystemUser systemUser = session.createQuery(
"from SystemUser s where s.username = :username",
SystemUser.class
).setParameter( "username", "admin" ).getSingleResult();
assertThat( systemUser.getName(), is( "Andrea" ) );
Subsystem subsystem = systemUser.getSubsystem();
assertFalse( Hibernate.isInitialized( subsystem ) );
statementInspector.assertExecutedCount( 1 );
statementInspector.assertNumberOfOccurrenceInQuery(
0,
"join",
0
);
statementInspector.clear();
assertThat( subsystem.getId(), is( "1" ) );
assertThat( subsystem.getDescription(), is( "Linux" ) );
assertThat( systemUser.getUsername(), is( "admin" ) );
assertTrue( Hibernate.isInitialized( subsystem ) );
statementInspector.assertExecutedCount( 1 );
statementInspector.assertNumberOfOccurrenceInQuery(
0,
"join",
0
);
}
);
}
@Test
public void testHql2(SessionFactoryScope scope) {
SQLStatementInspector statementInspector = scope.getCollectingStatementInspector();
statementInspector.clear();
scope.inTransaction(
session -> {
// intentionally set the Subsystem description to "Linux6", only the Subsystem.id value is used for the parameter binding
PK pk = new PK( new Subsystem( "1", "Linux6" ), "admin" );
SystemUser systemUser = session.createQuery(
"from SystemUser s where s.id = :id",
SystemUser.class
).setParameter( "id", pk ).getSingleResult();
assertThat( systemUser.getName(), is( "Andrea" ) );
Subsystem subsystem = systemUser.getSubsystem();
assertFalse( Hibernate.isInitialized( subsystem ) );
statementInspector.assertExecutedCount( 1 );
statementInspector.assertNumberOfOccurrenceInQuery(
0,
"join",
0
);
statementInspector.clear();
assertThat( subsystem.getId(), is( "1" ) );
assertThat( subsystem.getDescription(), is( "Linux" ) );
assertThat( systemUser.getUsername(), is( "admin" ) );
assertTrue( Hibernate.isInitialized( subsystem ) );
statementInspector.assertExecutedCount( 1 );
statementInspector.assertNumberOfOccurrenceInQuery(
0,
"join",
0
);
}
);
}
@Entity(name = "SystemUser")
@IdClass(PK.class)
public static | IdClassWithLazyManyToOneTest |
java | spring-projects__spring-boot | module/spring-boot-micrometer-metrics/src/main/java/org/springframework/boot/micrometer/metrics/actuate/endpoint/MetricsEndpoint.java | {
"start": 7784,
"end": 8240
} | class ____ {
private final Statistic statistic;
private final Double value;
Sample(Statistic statistic, Double value) {
this.statistic = statistic;
this.value = value;
}
public Statistic getStatistic() {
return this.statistic;
}
public Double getValue() {
return this.value;
}
@Override
public String toString() {
return "MeasurementSample{statistic=" + this.statistic + ", value=" + this.value + '}';
}
}
}
| Sample |
java | apache__rocketmq | remoting/src/main/java/org/apache/rocketmq/remoting/CommandCallback.java | {
"start": 848,
"end": 898
} | interface ____ {
void accept();
}
| CommandCallback |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/factory/config/ObjectFactoryCreatingFactoryBeanTests.java | {
"start": 1603,
"end": 5330
} | class ____ {
private DefaultListableBeanFactory beanFactory;
@BeforeEach
void setup() {
this.beanFactory = new DefaultListableBeanFactory();
new XmlBeanDefinitionReader(this.beanFactory).loadBeanDefinitions(
qualifiedResource(ObjectFactoryCreatingFactoryBeanTests.class, "context.xml"));
this.beanFactory.setSerializationId("test");
}
@AfterEach
void close() {
this.beanFactory.setSerializationId(null);
}
@Test
void testFactoryOperation() {
FactoryTestBean testBean = beanFactory.getBean("factoryTestBean", FactoryTestBean.class);
ObjectFactory<?> objectFactory = testBean.getObjectFactory();
Date date1 = (Date) objectFactory.getObject();
Date date2 = (Date) objectFactory.getObject();
assertThat(date1).isNotSameAs(date2);
}
@Test
void testFactorySerialization() throws Exception {
FactoryTestBean testBean = beanFactory.getBean("factoryTestBean", FactoryTestBean.class);
ObjectFactory<?> objectFactory = testBean.getObjectFactory();
objectFactory = SerializationTestUtils.serializeAndDeserialize(objectFactory);
Date date1 = (Date) objectFactory.getObject();
Date date2 = (Date) objectFactory.getObject();
assertThat(date1).isNotSameAs(date2);
}
@Test
void testProviderOperation() {
ProviderTestBean testBean = beanFactory.getBean("providerTestBean", ProviderTestBean.class);
Provider<?> provider = testBean.getProvider();
Date date1 = (Date) provider.get();
Date date2 = (Date) provider.get();
assertThat(date1).isNotSameAs(date2);
}
@Test
void testProviderSerialization() throws Exception {
ProviderTestBean testBean = beanFactory.getBean("providerTestBean", ProviderTestBean.class);
Provider<?> provider = testBean.getProvider();
provider = SerializationTestUtils.serializeAndDeserialize(provider);
Date date1 = (Date) provider.get();
Date date2 = (Date) provider.get();
assertThat(date1).isNotSameAs(date2);
}
@Test
void testDoesNotComplainWhenTargetBeanNameRefersToSingleton() throws Exception {
final String targetBeanName = "singleton";
final String expectedSingleton = "Alicia Keys";
BeanFactory beanFactory = mock();
given(beanFactory.getBean(targetBeanName)).willReturn(expectedSingleton);
ObjectFactoryCreatingFactoryBean factory = new ObjectFactoryCreatingFactoryBean();
factory.setTargetBeanName(targetBeanName);
factory.setBeanFactory(beanFactory);
factory.afterPropertiesSet();
ObjectFactory<?> objectFactory = factory.getObject();
Object actualSingleton = objectFactory.getObject();
assertThat(actualSingleton).isSameAs(expectedSingleton);
}
@Test
void testWhenTargetBeanNameIsNull() {
assertThatIllegalArgumentException().as(
"'targetBeanName' property not set").isThrownBy(
new ObjectFactoryCreatingFactoryBean()::afterPropertiesSet);
}
@Test
void testWhenTargetBeanNameIsEmptyString() {
ObjectFactoryCreatingFactoryBean factory = new ObjectFactoryCreatingFactoryBean();
factory.setTargetBeanName("");
assertThatIllegalArgumentException().as(
"'targetBeanName' property set to (invalid) empty string").isThrownBy(
factory::afterPropertiesSet);
}
@Test
void testWhenTargetBeanNameIsWhitespacedString() {
ObjectFactoryCreatingFactoryBean factory = new ObjectFactoryCreatingFactoryBean();
factory.setTargetBeanName(" \t");
assertThatIllegalArgumentException().as(
"'targetBeanName' property set to (invalid) only-whitespace string").isThrownBy(
factory::afterPropertiesSet);
}
@Test
void testEnsureOFBFBReportsThatItActuallyCreatesObjectFactoryInstances() {
assertThat(new ObjectFactoryCreatingFactoryBean().getObjectType()).as("Must be reporting that it creates ObjectFactory instances (as per | ObjectFactoryCreatingFactoryBeanTests |
java | bumptech__glide | integration/sqljournaldiskcache/src/main/java/com/bumptech/glide/integration/sqljournaldiskcache/JournalTable.java | {
"start": 67,
"end": 218
} | class ____ {
static final String TABLE_NAME = "journal";
private static final String INDEX_TIMESTAMP_KEY = "journal_timestamp_key_idx";
| JournalTable |
java | quarkusio__quarkus | extensions/vertx/runtime/src/main/java/io/quarkus/vertx/core/runtime/config/EventBusConfiguration.java | {
"start": 243,
"end": 2553
} | interface ____ {
/**
* The key configuration for the PEM format.
*/
PemKeyCertConfiguration keyCertificatePem();
/**
* The key configuration for the JKS format.
*/
JksConfiguration keyCertificateJks();
/**
* The key configuration for the PFX format.
*/
PfxConfiguration keyCertificatePfx();
/**
* The trust key configuration for the PEM format.
*/
PemTrustCertConfiguration trustCertificatePem();
/**
* The trust key configuration for the JKS format.
*/
JksConfiguration trustCertificateJks();
/**
* The trust key configuration for the PFX format.
*/
PfxConfiguration trustCertificatePfx();
/**
* The accept backlog.
*/
OptionalInt acceptBacklog();
/**
* The client authentication.
*/
@WithDefault("NONE")
String clientAuth();
/**
* The connect timeout.
*/
@WithDefault("60")
Duration connectTimeout();
/**
* The idle timeout in milliseconds.
*/
Optional<Duration> idleTimeout();
/**
* The receive buffer size.
*/
OptionalInt receiveBufferSize();
/**
* The number of reconnection attempts.
*/
@WithDefault("0")
int reconnectAttempts();
/**
* The reconnection interval in milliseconds.
*/
@WithDefault("1")
Duration reconnectInterval();
/**
* Whether to reuse the address.
*/
@WithDefault("true")
boolean reuseAddress();
/**
* Whether to reuse the port.
*/
@WithDefault("false")
boolean reusePort();
/**
* The send buffer size.
*/
OptionalInt sendBufferSize();
/**
* The so linger.
*/
OptionalInt soLinger();
/**
* Enables or Disabled SSL.
*/
@WithDefault("false")
boolean ssl();
/**
* Whether to keep the TCP connection opened (keep-alive).
*/
@WithDefault("false")
boolean tcpKeepAlive();
/**
* Configure the TCP no delay.
*/
@WithDefault("true")
boolean tcpNoDelay();
/**
* Configure the traffic class.
*/
OptionalInt trafficClass();
/**
* Enables or disables the trust all parameter.
*/
@WithDefault("false")
boolean trustAll();
}
| EventBusConfiguration |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestWebAppProxyServlet.java | {
"start": 5004,
"end": 6108
} | class ____ extends HttpServlet {
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp)
throws ServletException, IOException {
int numHeaders = 0;
hasUnknownHeader = false;
@SuppressWarnings("unchecked")
Enumeration<String> names = req.getHeaderNames();
while(names.hasMoreElements()) {
String headerName = names.nextElement();
if (headerName.equals(UNKNOWN_HEADER)) {
hasUnknownHeader = true;
}
++numHeaders;
}
numberOfHeaders = numHeaders;
resp.setStatus(HttpServletResponse.SC_OK);
}
@Override
protected void doPost(HttpServletRequest req, HttpServletResponse resp)
throws ServletException, IOException {
InputStream is = req.getInputStream();
OutputStream os = resp.getOutputStream();
int c = is.read();
while (c > -1) {
os.write(c);
c = is.read();
}
is.close();
os.close();
resp.setStatus(HttpServletResponse.SC_OK);
}
}
@SuppressWarnings("serial")
public static | TestServlet |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/MiloBrowseComponentBuilderFactory.java | {
"start": 15560,
"end": 20177
} | class ____
extends AbstractComponentBuilder<MiloBrowseComponent>
implements MiloBrowseComponentBuilder {
@Override
protected MiloBrowseComponent buildConcreteComponent() {
return new MiloBrowseComponent();
}
private org.apache.camel.component.milo.client.MiloClientConfiguration getOrCreateConfiguration(MiloBrowseComponent component) {
if (component.getConfiguration() == null) {
component.setConfiguration(new org.apache.camel.component.milo.client.MiloClientConfiguration());
}
return component.getConfiguration();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "clientId": getOrCreateConfiguration((MiloBrowseComponent) component).setClientId((java.lang.String) value); return true;
case "configuration": ((MiloBrowseComponent) component).setConfiguration((org.apache.camel.component.milo.client.MiloClientConfiguration) value); return true;
case "discoveryEndpointSuffix": getOrCreateConfiguration((MiloBrowseComponent) component).setDiscoveryEndpointSuffix((java.lang.String) value); return true;
case "discoveryEndpointUri": getOrCreateConfiguration((MiloBrowseComponent) component).setDiscoveryEndpointUri((java.lang.String) value); return true;
case "lazyStartProducer": ((MiloBrowseComponent) component).setLazyStartProducer((boolean) value); return true;
case "autowiredEnabled": ((MiloBrowseComponent) component).setAutowiredEnabled((boolean) value); return true;
case "allowedSecurityPolicies": getOrCreateConfiguration((MiloBrowseComponent) component).setAllowedSecurityPolicies((java.lang.String) value); return true;
case "applicationName": getOrCreateConfiguration((MiloBrowseComponent) component).setApplicationName((java.lang.String) value); return true;
case "applicationUri": getOrCreateConfiguration((MiloBrowseComponent) component).setApplicationUri((java.lang.String) value); return true;
case "channelLifetime": getOrCreateConfiguration((MiloBrowseComponent) component).setChannelLifetime((java.lang.Long) value); return true;
case "keyAlias": getOrCreateConfiguration((MiloBrowseComponent) component).setKeyAlias((java.lang.String) value); return true;
case "keyPassword": getOrCreateConfiguration((MiloBrowseComponent) component).setKeyPassword((java.lang.String) value); return true;
case "keyStorePassword": getOrCreateConfiguration((MiloBrowseComponent) component).setKeyStorePassword((java.lang.String) value); return true;
case "keyStoreType": getOrCreateConfiguration((MiloBrowseComponent) component).setKeyStoreType((java.lang.String) value); return true;
case "keyStoreUrl": getOrCreateConfiguration((MiloBrowseComponent) component).setKeyStoreUrl((java.lang.String) value); return true;
case "maxPendingPublishRequests": getOrCreateConfiguration((MiloBrowseComponent) component).setMaxPendingPublishRequests((java.lang.Long) value); return true;
case "maxResponseMessageSize": getOrCreateConfiguration((MiloBrowseComponent) component).setMaxResponseMessageSize((java.lang.Long) value); return true;
case "miloClientConnectionManager": ((MiloBrowseComponent) component).setMiloClientConnectionManager((org.apache.camel.component.milo.client.MiloClientConnectionManager) value); return true;
case "overrideHost": getOrCreateConfiguration((MiloBrowseComponent) component).setOverrideHost((boolean) value); return true;
case "productUri": getOrCreateConfiguration((MiloBrowseComponent) component).setProductUri((java.lang.String) value); return true;
case "requestedPublishingInterval": getOrCreateConfiguration((MiloBrowseComponent) component).setRequestedPublishingInterval((java.lang.Double) value); return true;
case "requestTimeout": getOrCreateConfiguration((MiloBrowseComponent) component).setRequestTimeout((java.lang.Long) value); return true;
case "sessionName": getOrCreateConfiguration((MiloBrowseComponent) component).setSessionName((java.lang.String) value); return true;
case "sessionTimeout": getOrCreateConfiguration((MiloBrowseComponent) component).setSessionTimeout((java.lang.Long) value); return true;
default: return false;
}
}
}
} | MiloBrowseComponentBuilderImpl |
java | apache__maven | impl/maven-impl/src/test/java/org/apache/maven/impl/AbstractVersionTest.java | {
"start": 1021,
"end": 2818
} | class ____ {
protected static final int X_LT_Y = -1;
protected static final int X_EQ_Y = 0;
protected static final int X_GT_Y = 1;
protected abstract Version newVersion(String version);
protected void assertOrder(int expected, String version1, String version2) {
Version v1 = newVersion(version1);
Version v2 = newVersion(version2);
if (expected > 0) {
assertEquals(1, Integer.signum(v1.compareTo(v2)), "expected " + v1 + " > " + v2);
assertEquals(-1, Integer.signum(v2.compareTo(v1)), "expected " + v2 + " < " + v1);
assertNotEquals(v1, v2, "expected " + v1 + " != " + v2);
assertNotEquals(v2, v1, "expected " + v2 + " != " + v1);
} else if (expected < 0) {
assertEquals(-1, Integer.signum(v1.compareTo(v2)), "expected " + v1 + " < " + v2);
assertEquals(1, Integer.signum(v2.compareTo(v1)), "expected " + v2 + " > " + v1);
assertNotEquals(v1, v2, "expected " + v1 + " != " + v2);
assertNotEquals(v2, v1, "expected " + v2 + " != " + v1);
} else {
assertEquals(0, v1.compareTo(v2), "expected " + v1 + " == " + v2);
assertEquals(0, v2.compareTo(v1), "expected " + v2 + " == " + v1);
assertEquals(v1, v2, "expected " + v1 + " == " + v2);
assertEquals(v2, v1, "expected " + v2 + " == " + v1);
assertEquals(v1.hashCode(), v2.hashCode(), "expected #(" + v1 + ") == #(" + v1 + ")");
}
}
protected void assertSequence(String... versions) {
for (int i = 0; i < versions.length - 1; i++) {
for (int j = i + 1; j < versions.length; j++) {
assertOrder(X_LT_Y, versions[i], versions[j]);
}
}
}
}
| AbstractVersionTest |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/KotlinReflectionParameterNameDiscoverer.java | {
"start": 1379,
"end": 3143
} | class ____ implements ParameterNameDiscoverer {
@Override
public @Nullable String @Nullable [] getParameterNames(Method method) {
if (KotlinDetector.isKotlinType(method.getDeclaringClass())) {
try {
KFunction<?> function = ReflectJvmMapping.getKotlinFunction(method);
return (function != null ? getParameterNames(function.getParameters()) : null);
}
catch (UnsupportedOperationException ignored) {
}
}
return null;
}
@Override
public @Nullable String @Nullable [] getParameterNames(Constructor<?> ctor) {
if (!ctor.getDeclaringClass().isEnum() && KotlinDetector.isKotlinType(ctor.getDeclaringClass())) {
try {
KFunction<?> function = ReflectJvmMapping.getKotlinFunction(ctor);
if (function != null) {
return getParameterNames(function.getParameters());
}
}
catch (UnsupportedOperationException ignored) {
}
}
return null;
}
private @Nullable String @Nullable [] getParameterNames(List<KParameter> parameters) {
@Nullable String[] parameterNames = parameters.stream()
// Extension receivers of extension methods must be included as they appear as normal method parameters in Java
.filter(p -> KParameter.Kind.VALUE.equals(p.getKind()) || KParameter.Kind.EXTENSION_RECEIVER.equals(p.getKind()))
// extension receivers are not explicitly named, but require a name for Java interoperability
// $receiver is not a valid Kotlin identifier, but valid in Java, so it can be used here
.map(p -> KParameter.Kind.EXTENSION_RECEIVER.equals(p.getKind()) ? "$receiver" : p.getName())
.toArray(String[]::new);
for (String parameterName : parameterNames) {
if (parameterName == null) {
return null;
}
}
return parameterNames;
}
}
| KotlinReflectionParameterNameDiscoverer |
java | grpc__grpc-java | api/src/testFixtures/java/io/grpc/ForwardingTestUtil.java | {
"start": 2621,
"end": 4794
} | class ____ methods should be forwarded.
* @param mockDelegate The mockito mock of the delegate class.
* @param forwarder The forwarder object that forwards to the mockDelegate.
* @param skippedMethods A collection of methods that are skipped by the test.
* @param argProvider provides argument to be passed to tested forwarding methods.
*/
public static <T> void testMethodsForwarded(
Class<T> delegateClass,
T mockDelegate,
T forwarder,
Collection<Method> skippedMethods,
ArgumentProvider argProvider) throws Exception {
assertTrue(mockingDetails(mockDelegate).isMock());
assertFalse(mockingDetails(forwarder).isMock());
for (Method method : delegateClass.getDeclaredMethods()) {
if (Modifier.isStatic(method.getModifiers())
|| Modifier.isPrivate(method.getModifiers())
|| Modifier.isFinal(method.getModifiers())
|| skippedMethods.contains(method)) {
continue;
}
Class<?>[] argTypes = method.getParameterTypes();
Object[] args = new Object[argTypes.length];
for (int i = 0; i < argTypes.length; i++) {
if ((args[i] = argProvider.get(method, i, argTypes[i])) == null) {
args[i] = Defaults.defaultValue(argTypes[i]);
}
}
method.invoke(forwarder, args);
try {
method.invoke(verify(mockDelegate), args);
} catch (InvocationTargetException e) {
AssertionError ae =
new AssertionError(String.format("Method was not forwarded: %s", method));
ae.initCause(e);
throw ae;
}
}
boolean skipToString = false;
for (Method method : skippedMethods) {
if (method.getName().equals("toString")) {
skipToString = true;
break;
}
}
if (!skipToString) {
String actual = forwarder.toString();
String expected =
MoreObjects.toStringHelper(forwarder).add("delegate", mockDelegate).toString();
assertEquals("Method toString() was not forwarded properly", expected, actual);
}
}
/**
* Provides arguments for forwarded methods tested in {@link #testMethodsForwarded}.
*/
public | whose |
java | spring-projects__spring-boot | module/spring-boot-jdbc/src/test/java/org/springframework/boot/jdbc/autoconfigure/JdbcTemplateAutoConfigurationTests.java | {
"start": 9371,
"end": 9541
} | class ____ {
@Bean
DataSource customDataSource() {
return new TestDataSource();
}
}
@Configuration(proxyBeanMethods = false)
static | TestDataSourceConfiguration |
java | quarkusio__quarkus | extensions/resteasy-classic/resteasy-client/deployment/src/test/java/io/quarkus/restclient/configuration/GlobalConfigurationTest.java | {
"start": 667,
"end": 3672
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest TEST = new QuarkusUnitTest()
.withApplicationRoot(jar -> jar.addClasses(EchoClient.class, EchoResource.class, MyResponseFilter.class,
MyHostnameVerifier.class))
.withConfigurationResource("global-configuration-test-application.properties");
@Inject
RestClientsConfig configRoot;
@RestClient
EchoClient client;
@Test
void shouldHaveSingletonScope() {
BeanManager beanManager = Arc.container().beanManager();
Set<Bean<?>> beans = beanManager.getBeans(EchoClient.class, RestClient.LITERAL);
Bean<?> resolvedBean = beanManager.resolve(beans);
assertThat(resolvedBean.getScope()).isEqualTo(Singleton.class);
}
@Test
void shouldRespond() {
assertThat(client.echo("world")).contains("world");
}
@SuppressWarnings("OptionalGetWithoutIsPresent")
@Test
void checkGlobalConfigValues() {
// global properties:
assertThat(configRoot.multipartPostEncoderMode().get()).isEqualTo("HTML5");
assertThat(configRoot.disableContextualErrorMessages()).isTrue();
// global defaults for client specific properties:
assertThat(configRoot.proxyAddress().get()).isEqualTo("host:123");
assertThat(configRoot.proxyUser().get()).isEqualTo("proxyUser");
assertThat(configRoot.proxyPassword().get()).isEqualTo("proxyPassword");
assertThat(configRoot.nonProxyHosts().get()).isEqualTo("nonProxyHosts");
assertThat(configRoot.connectTimeout()).isEqualTo(2000);
assertThat(configRoot.readTimeout()).isEqualTo(2001);
assertThat(configRoot.userAgent().get()).isEqualTo("agent");
assertThat(configRoot.headers()).isEqualTo(Collections.singletonMap("foo", "bar"));
assertThat(configRoot.hostnameVerifier().get())
.isEqualTo("io.quarkus.restclient.configuration.MyHostnameVerifier");
assertThat(configRoot.connectionTTL().getAsInt()).isEqualTo(20000); // value in ms, will be converted to seconds
assertThat(configRoot.connectionPoolSize().getAsInt()).isEqualTo(2);
assertThat(configRoot.maxRedirects().getAsInt()).isEqualTo(2);
assertThat(configRoot.followRedirects().get()).isTrue();
assertThat(configRoot.providers().get())
.isEqualTo("io.quarkus.restclient.configuration.MyResponseFilter");
assertThat(configRoot.queryParamStyle().get()).isEqualTo(QueryParamStyle.MULTI_PAIRS);
assertThat(configRoot.trustStore().get()).isEqualTo("/path");
assertThat(configRoot.trustStorePassword().get()).isEqualTo("password");
assertThat(configRoot.trustStoreType().get()).isEqualTo("JKS");
assertThat(configRoot.keyStore().get()).isEqualTo("/path");
assertThat(configRoot.keyStorePassword().get()).isEqualTo("password");
assertThat(configRoot.keyStoreType().get()).isEqualTo("JKS");
}
}
| GlobalConfigurationTest |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/ColumnInfo.java | {
"start": 492,
"end": 824
} | interface ____ extends Writeable {
/*
static ColumnInfo fromXContent(XContentParser parser) {
return ColumnInfoImpl.PARSER.apply(parser, null);
}
*/
XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException;
String name();
String outputType();
}
| ColumnInfo |
java | processing__processing4 | core/src/processing/core/PApplet.java | {
"start": 338781,
"end": 344944
} | class ____> [sketch args]");
System.err.println("See the Javadoc for PApplet for an explanation.");
System.exit(1);
}
boolean external = false;
int[] location = null;
int[] editorLocation = null;
String name = null;
int windowColor = 0;
int stopColor = 0xff808080;
boolean hideStop = false;
int displayNum = -1; // use default
boolean present = false;
boolean fullScreen = false;
float uiScale = 0;
String param, value;
String folder = calcSketchPath();
int argIndex = 0;
label:
while (argIndex < args.length) {
int equals = args[argIndex].indexOf('=');
if (equals != -1) {
param = args[argIndex].substring(0, equals);
value = args[argIndex].substring(equals + 1);
//noinspection EnhancedSwitchMigration
switch (param) {
case ARGS_EDITOR_LOCATION:
external = true;
editorLocation = parseInt(split(value, ','));
break;
case ARGS_DISPLAY:
displayNum = parseInt(value, -2);
if (displayNum == -2) {
// this means the display value couldn't be parsed properly
System.err.println(value + " is not a valid choice for " + ARGS_DISPLAY);
displayNum = -1; // use the default
}
break;
case ARGS_DISABLE_AWT:
disableAWT = true;
break;
case ARGS_WINDOW_COLOR:
if (value.charAt(0) == '#' && value.length() == 7) {
value = value.substring(1);
windowColor = 0xff000000 | Integer.parseInt(value, 16);
} else {
System.err.println(ARGS_WINDOW_COLOR + " should be a # followed by six digits");
}
break;
case ARGS_STOP_COLOR:
if (value.charAt(0) == '#' && value.length() == 7) {
value = value.substring(1);
stopColor = 0xff000000 | Integer.parseInt(value, 16);
} else {
System.err.println(ARGS_STOP_COLOR + " should be a # followed by six digits");
}
break;
case ARGS_SKETCH_FOLDER:
folder = value;
break;
case ARGS_LOCATION:
location = parseInt(split(value, ','));
break;
case ARGS_UI_SCALE:
uiScale = parseFloat(value, 0);
if (uiScale == 0) {
System.err.println("Could not parse " + value + " for " + ARGS_UI_SCALE);
}
break;
}
} else {
switch (args[argIndex]) {
case ARGS_PRESENT:
present = true;
break;
case ARGS_HIDE_STOP:
hideStop = true;
break;
case ARGS_EXTERNAL:
external = true;
break;
case ARGS_FULL_SCREEN:
fullScreen = true;
break;
default:
name = args[argIndex];
break label; // because of break, argIndex won't increment again
}
}
argIndex++;
}
if (platform == WINDOWS) {
// Set DPI scaling to either 1 or 2, but avoid fractional
// settings such as 125% and 250% that make things look gross.
// Also applies to 300% since that is not even a thing.
// no longer possible to set prop after this line initializes AWT
//int dpi = java.awt.Toolkit.getDefaultToolkit().getScreenResolution();
// Attempt to get the resolution using a helper app. This code is
// fairly conservative: if there is trouble, we go with the default.
if (uiScale == 0) {
int dpi = getWindowsDPI();
if (dpi != 0) {
//uiScale = constrain(dpi / 96, 1, 2);
// If larger than 150% set scale to 2. Using scale 1 at 175% feels
// reeaally small. 150% is more of a tossup; it could also use 2.
uiScale = (dpi > 144) ? 2 : 1;
}
}
if (uiScale != 0) {
System.setProperty("sun.java2d.uiScale", String.valueOf(uiScale));
//} else {
//System.err.println("Could not identify Windows DPI, not setting sun.java2d.uiScale");
}
}
if (!disableAWT) {
ShimAWT.initRun();
}
final PApplet sketch;
if (constructedSketch != null) {
sketch = constructedSketch;
} else {
try {
Class<?> c =
Thread.currentThread().getContextClassLoader().loadClass(name);
sketch = (PApplet) c.getDeclaredConstructor().newInstance();
} catch (RuntimeException re) {
// Don't re-package runtime exceptions
throw re;
} catch (Exception e) {
// Package non-runtime exceptions so we can throw them freely
throw new RuntimeException(e);
}
}
// TODO When disabling AWT for LWJGL or others, we need to figure out
// how to make Cmd-Q and the rest of this still work properly.
if (platform == MACOS && !disableAWT) {
try {
final String td = "processing.core.ThinkDifferent";
Class<?> thinkDifferent =
Thread.currentThread().getContextClassLoader().loadClass(td);
Method method =
thinkDifferent.getMethod("init", PApplet.class);
method.invoke(null, sketch);
} catch (Exception e) {
e.printStackTrace(); // That's unfortunate
}
}
// Set the suggested display that's coming from the command line
// (and most likely, from the PDE's preference setting).
sketch.display = displayNum;
sketch.present = present;
sketch.fullScreen = fullScreen;
sketch.pixelDensity = sketch.displayDensity();
sketch.pixelDensityWarning = sketch.pixelDensity > 1;
// For 3.0.1, moved this above handleSettings() so that loadImage() can be
// used inside settings(). Sets a terrible precedent, but the alternative
// of not being able to size a sketch to an image is driving people loopy.
sketch.sketchPath = folder;
// Don't set 'args' to a zero-length array if it should be null [3.0a8]
if (args.length != argIndex + 1) {
// pass everything after the | name |
java | elastic__elasticsearch | x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/TimezoneUtils.java | {
"start": 495,
"end": 558
} | class ____ dealing with Timezone related operations.
*/
public | for |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/emops/cascade/CascadePersistTest.java | {
"start": 617,
"end": 3108
} | class ____ {
@AfterEach
public void tearDown(EntityManagerFactoryScope scope) {
scope.getEntityManagerFactory().getSchemaManager().truncate();
}
@Test
public void testLazyCollectionsStayLazyOnPersist(EntityManagerFactoryScope scope) throws Exception {
scope.inEntityManager(
entityManager -> {
try {
entityManager.getTransaction().begin();
//initialize
A a = new A();
a.setName( "name1" );
entityManager.persist( a );
a = new A();
a.setName( "name2" );
entityManager.persist( a );
a = new A();
a.setName( "name3" );
entityManager.persist( a );
entityManager.flush();
a = entityManager.find( A.class, 1 );
for ( int i = 0; i < 3; i++ ) {
B1 b1 = new B1();
b1.setA( a );
entityManager.persist( b1 );
}
for ( int i = 0; i < 3; i++ ) {
B2 b2 = new B2();
b2.setA( a );
entityManager.persist( b2 );
}
for ( int i = 0; i < 3; i++ ) {
B3 b3 = new B3();
b3.setA( a );
entityManager.persist( b3 );
}
for ( int i = 0; i < 3; i++ ) {
B4 b4 = new B4();
b4.setA( a );
entityManager.persist( b4 );
}
entityManager.flush();
B1 b1 = entityManager.find( B1.class, 1 );
for ( int i = 0; i < 2; i++ ) {
C1 c1 = new C1();
c1.setB1( b1 );
entityManager.persist( c1 );
}
B2 b2 = entityManager.find( B2.class, 1 );
for ( int i = 0; i < 4; i++ ) {
C2 c2 = new C2();
c2.setB2( b2 );
entityManager.persist( c2 );
}
entityManager.flush();
entityManager.clear();
//test
a = entityManager.find( A.class, 1 );
C2 c2 = new C2();
for ( B2 anotherB2 : a.getB2List() ) {
if ( anotherB2.getId() == 1 ) {
anotherB2.getC2List().add( c2 );
c2.setB2( anotherB2 );
}
}
Statistics statistics = entityManager.unwrap(Session.class).getSessionFactory().getStatistics();
statistics.setStatisticsEnabled( true );
statistics.clear();
entityManager.persist( c2 );
long loaded = statistics.getEntityLoadCount();
assertEquals( 0, loaded );
entityManager.flush();
entityManager.getTransaction().rollback();
}
catch (Exception e) {
if ( entityManager.getTransaction().isActive() ) {
entityManager.getTransaction().rollback();
}
throw e;
}
}
);
}
}
| CascadePersistTest |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/annotation/MergedAnnotationsComposedOnSingleAnnotatedElementTests.java | {
"start": 8748,
"end": 9081
} | class ____ implements ComposedCacheInterface {
}
@FooCache(key = "fooKey")
@BarCache(key = "barKey")
private void multipleComposedCachesMethod() {
}
@Cacheable(cacheName = "fooCache", key = "fooKey")
@BarCache(key = "barKey")
private void composedPlusLocalCachesMethod() {
}
public | ComposedCacheOnInterfaceAndLocalCacheClass |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobTests.java | {
"start": 706,
"end": 2414
} | class ____ extends SimpleDiffableSerializationTestCase<RollupJob> {
@Override
protected Writeable.Reader<Diff<RollupJob>> diffReader() {
return RollupJob::readJobDiffFrom;
}
@Override
protected RollupJob doParseInstance(XContentParser parser) throws IOException {
return RollupJob.fromXContent(parser);
}
@Override
protected Writeable.Reader<RollupJob> instanceReader() {
return RollupJob::new;
}
@Override
protected RollupJob createTestInstance() {
if (randomBoolean()) {
return new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), null);
}
Map<String, String> headers = Collections.emptyMap();
if (randomBoolean()) {
headers = Maps.newMapWithExpectedSize(1);
headers.put("foo", "bar");
}
return new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), headers);
}
@Override
protected RollupJob mutateInstance(RollupJob instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
@Override
protected RollupJob makeTestChanges(RollupJob other) {
if (randomBoolean()) {
if (other.getHeaders().isEmpty()) {
Map<String, String> headers = Maps.newMapWithExpectedSize(1);
headers.put("foo", "bar");
return new RollupJob(other.getConfig(), headers);
} else {
return new RollupJob(other.getConfig(), null);
}
} else {
return new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), other.getHeaders());
}
}
}
| RollupJobTests |
java | netty__netty | microbench/src/main/java/io/netty/microbench/util/RecyclerBenchmark.java | {
"start": 1894,
"end": 2764
} | class ____ extends AbstractMicrobenchmark {
@Override
protected ChainedOptionsBuilder newOptionsBuilder() throws Exception {
return super.newOptionsBuilder().addProfiler("gc");
}
@Benchmark
public DummyObject plainNew() {
return new DummyObject();
}
@Benchmark
public DummyObject recyclerGetAndOrphan(ProducerConsumerState state) {
return state.recycler.get();
}
@Benchmark
public DummyObject recyclerGetAndRecycle(ProducerConsumerState state) {
DummyObject o = state.recycler.get();
o.recycle();
return o;
}
@Benchmark
public DummyObject recyclerGetAndUnguardedRecycle(ProducerConsumerState state) {
DummyObject o = state.recycler.get();
o.unguardedRecycle();
return o;
}
@State(Scope.Benchmark)
public static | RecyclerBenchmark |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/internals/metrics/OpenIterators.java | {
"start": 1359,
"end": 3158
} | class ____ {
private final TaskId taskId;
private final String metricsScope;
private final String name;
private final StreamsMetricsImpl streamsMetrics;
private final NavigableSet<MeteredIterator> openIterators = new ConcurrentSkipListSet<>(Comparator.comparingLong(MeteredIterator::startTimestamp));
private final AtomicLong oldestStartTimestamp = new AtomicLong();
private MetricName metricName;
public OpenIterators(final TaskId taskId,
final String metricsScope,
final String name,
final StreamsMetricsImpl streamsMetrics) {
this.taskId = taskId;
this.metricsScope = metricsScope;
this.name = name;
this.streamsMetrics = streamsMetrics;
}
public void add(final MeteredIterator iterator) {
openIterators.add(iterator);
updateOldestStartTimestamp();
if (openIterators.size() == 1) {
metricName = StateStoreMetrics.addOldestOpenIteratorGauge(taskId.toString(), metricsScope, name, streamsMetrics,
(config, now) -> oldestStartTimestamp.get()
);
}
}
public void remove(final MeteredIterator iterator) {
if (openIterators.size() == 1) {
streamsMetrics.removeStoreLevelMetric(metricName);
}
openIterators.remove(iterator);
updateOldestStartTimestamp();
}
public long sum() {
return openIterators.size();
}
private void updateOldestStartTimestamp() {
final Iterator<MeteredIterator> openIteratorsIterator = openIterators.iterator();
if (openIteratorsIterator.hasNext()) {
oldestStartTimestamp.set(openIteratorsIterator.next().startTimestamp());
}
}
}
| OpenIterators |
java | grpc__grpc-java | binder/src/main/java/io/grpc/binder/internal/PingTracker.java | {
"start": 2491,
"end": 3246
} | class ____ {
private final PingCallback callback;
private final Executor executor;
private final int id;
private final long startTimeNanos;
@GuardedBy("this")
private boolean done;
Ping(PingCallback callback, Executor executor, int id) {
this.callback = callback;
this.executor = executor;
this.id = id;
this.startTimeNanos = ticker.read();
}
private synchronized void fail(Status status) {
if (!done) {
done = true;
executor.execute(() -> callback.onFailure(status));
}
}
private synchronized void success() {
if (!done) {
done = true;
executor.execute(() -> callback.onSuccess(ticker.read() - startTimeNanos));
}
}
}
}
| Ping |
java | apache__flink | flink-formats/flink-orc/src/main/java/org/apache/flink/orc/OrcSplitReaderUtil.java | {
"start": 2166,
"end": 9530
} | class ____ {
/** Util for generating partitioned {@link OrcColumnarRowSplitReader}. */
public static OrcColumnarRowSplitReader<VectorizedRowBatch> genPartColumnarRowReader(
String hiveVersion,
Configuration conf,
String[] fullFieldNames,
DataType[] fullFieldTypes,
Map<String, Object> partitionSpec,
int[] selectedFields,
List<OrcFilters.Predicate> conjunctPredicates,
int batchSize,
Path path,
long splitStart,
long splitLength)
throws IOException {
List<String> nonPartNames = getNonPartNames(fullFieldNames, partitionSpec);
int[] selectedOrcFields =
getSelectedOrcFields(fullFieldNames, selectedFields, nonPartNames);
ColumnBatchGenerator<VectorizedRowBatch> gen =
(VectorizedRowBatch rowBatch) -> {
// create and initialize the row batch
ColumnVector[] vectors = new ColumnVector[selectedFields.length];
for (int i = 0; i < vectors.length; i++) {
String name = fullFieldNames[selectedFields[i]];
LogicalType type = fullFieldTypes[selectedFields[i]].getLogicalType();
vectors[i] =
partitionSpec.containsKey(name)
? createFlinkVectorFromConstant(
type, partitionSpec.get(name), batchSize)
: createFlinkVector(
rowBatch.cols[nonPartNames.indexOf(name)], type);
}
return new VectorizedColumnBatch(vectors);
};
return new OrcColumnarRowSplitReader<>(
OrcShim.createShim(hiveVersion),
conf,
convertToOrcTypeWithPart(fullFieldNames, fullFieldTypes, partitionSpec.keySet()),
selectedOrcFields,
gen,
conjunctPredicates,
batchSize,
path,
splitStart,
splitLength);
}
public static int[] getSelectedOrcFields(
String[] fullFieldNames, int[] selectedFields, List<String> nonPartNames) {
return Arrays.stream(selectedFields)
.mapToObj(i -> fullFieldNames[i])
.filter(nonPartNames::contains)
.mapToInt(nonPartNames::indexOf)
.toArray();
}
public static List<String> getNonPartNames(
String[] fullFieldNames, Collection<String> partitionKeys) {
return Arrays.stream(fullFieldNames)
.filter(n -> !partitionKeys.contains(n))
.collect(Collectors.toList());
}
public static List<String> getNonPartNames(
String[] fullFieldNames, Map<String, Object> partitionSpec) {
return Arrays.stream(fullFieldNames)
.filter(n -> !partitionSpec.containsKey(n))
.collect(Collectors.toList());
}
public static TypeDescription convertToOrcTypeWithPart(
String[] fullFieldNames, DataType[] fullFieldTypes, Collection<String> partitionKeys) {
return convertToOrcTypeWithPart(
fullFieldNames,
Arrays.stream(fullFieldTypes)
.map(DataType::getLogicalType)
.toArray(LogicalType[]::new),
partitionKeys);
}
public static TypeDescription convertToOrcTypeWithPart(
String[] fullFieldNames,
LogicalType[] fullFieldTypes,
Collection<String> partitionKeys) {
List<String> fullNameList = Arrays.asList(fullFieldNames);
String[] orcNames =
fullNameList.stream()
.filter(n -> !partitionKeys.contains(n))
.toArray(String[]::new);
LogicalType[] orcTypes =
Arrays.stream(orcNames)
.mapToInt(fullNameList::indexOf)
.mapToObj(i -> fullFieldTypes[i])
.toArray(LogicalType[]::new);
return logicalTypeToOrcType(RowType.of(orcTypes, orcNames));
}
/** See {@code org.apache.flink.table.catalog.hive.util.HiveTypeUtil}. */
public static TypeDescription logicalTypeToOrcType(LogicalType type) {
type = type.copy(true);
switch (type.getTypeRoot()) {
case CHAR:
return TypeDescription.createChar().withMaxLength(((CharType) type).getLength());
case VARCHAR:
int len = ((VarCharType) type).getLength();
if (len == VarCharType.MAX_LENGTH) {
return TypeDescription.createString();
} else {
return TypeDescription.createVarchar().withMaxLength(len);
}
case BOOLEAN:
return TypeDescription.createBoolean();
case VARBINARY:
if (type.equals(DataTypes.BYTES().getLogicalType())) {
return TypeDescription.createBinary();
} else {
throw new UnsupportedOperationException(
"Not support other binary type: " + type);
}
case DECIMAL:
DecimalType decimalType = (DecimalType) type;
return TypeDescription.createDecimal()
.withScale(decimalType.getScale())
.withPrecision(decimalType.getPrecision());
case TINYINT:
return TypeDescription.createByte();
case SMALLINT:
return TypeDescription.createShort();
case INTEGER:
return TypeDescription.createInt();
case BIGINT:
return TypeDescription.createLong();
case FLOAT:
return TypeDescription.createFloat();
case DOUBLE:
return TypeDescription.createDouble();
case DATE:
return TypeDescription.createDate();
case TIMESTAMP_WITHOUT_TIME_ZONE:
return TypeDescription.createTimestamp();
case ARRAY:
ArrayType arrayType = (ArrayType) type;
return TypeDescription.createList(logicalTypeToOrcType(arrayType.getElementType()));
case MAP:
MapType mapType = (MapType) type;
return TypeDescription.createMap(
logicalTypeToOrcType(mapType.getKeyType()),
logicalTypeToOrcType(mapType.getValueType()));
case ROW:
RowType rowType = (RowType) type;
TypeDescription struct = TypeDescription.createStruct();
for (int i = 0; i < rowType.getFieldCount(); i++) {
struct.addField(
rowType.getFieldNames().get(i),
logicalTypeToOrcType(rowType.getChildren().get(i)));
}
return struct;
default:
throw new UnsupportedOperationException("Unsupported type: " + type);
}
}
}
| OrcSplitReaderUtil |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/condition/ConsumesRequestConditionTests.java | {
"start": 1097,
"end": 8269
} | class ____ {
@Test
void consumesMatch() {
ConsumesRequestCondition condition = new ConsumesRequestCondition("text/plain");
MockHttpServletRequest request = new MockHttpServletRequest();
request.setContentType("text/plain");
assertThat(condition.getMatchingCondition(request)).isNotNull();
}
@Test
void negatedConsumesMatch() {
ConsumesRequestCondition condition = new ConsumesRequestCondition("!text/plain");
MockHttpServletRequest request = new MockHttpServletRequest();
request.setContentType("text/plain");
assertThat(condition.getMatchingCondition(request)).isNull();
}
@Test
void getConsumableMediaTypesNegatedExpression() {
ConsumesRequestCondition condition = new ConsumesRequestCondition("!application/xml");
assertThat(condition.getConsumableMediaTypes()).isEqualTo(Collections.emptySet());
}
@Test
void consumesWildcardMatch() {
ConsumesRequestCondition condition = new ConsumesRequestCondition("text/*");
MockHttpServletRequest request = new MockHttpServletRequest();
request.setContentType("text/plain");
assertThat(condition.getMatchingCondition(request)).isNotNull();
}
@Test
void consumesMultipleMatch() {
ConsumesRequestCondition condition = new ConsumesRequestCondition("text/plain", "application/xml");
MockHttpServletRequest request = new MockHttpServletRequest();
request.setContentType("text/plain");
assertThat(condition.getMatchingCondition(request)).isNotNull();
}
@Test
void consumesSingleNoMatch() {
ConsumesRequestCondition condition = new ConsumesRequestCondition("text/plain");
MockHttpServletRequest request = new MockHttpServletRequest();
request.setContentType("application/xml");
assertThat(condition.getMatchingCondition(request)).isNull();
}
@Test // gh-28024
public void matchWithParameters() {
String base = "application/hal+json";
ConsumesRequestCondition condition = new ConsumesRequestCondition(base + ";profile=\"a\"");
MockHttpServletRequest request = new MockHttpServletRequest();
request.setContentType(base + ";profile=\"a\"");
assertThat(condition.getMatchingCondition(request)).isNotNull();
condition = new ConsumesRequestCondition(base + ";profile=\"a\"");
request.setContentType(base + ";profile=\"b\"");
assertThat(condition.getMatchingCondition(request)).isNull();
condition = new ConsumesRequestCondition(base + ";profile=\"a\"");
request.setContentType(base);
assertThat(condition.getMatchingCondition(request)).isNotNull();
condition = new ConsumesRequestCondition(base);
request.setContentType(base + ";profile=\"a\"");
assertThat(condition.getMatchingCondition(request)).isNotNull();
condition = new ConsumesRequestCondition(base + ";profile=\"a\"");
request.setContentType(base + ";profile=\"A\"");
assertThat(condition.getMatchingCondition(request)).isNotNull();
}
@Test
void consumesParseError() {
ConsumesRequestCondition condition = new ConsumesRequestCondition("text/plain");
MockHttpServletRequest request = new MockHttpServletRequest();
request.setContentType("01");
assertThat(condition.getMatchingCondition(request)).isNull();
}
@Test
void consumesParseErrorWithNegation() {
ConsumesRequestCondition condition = new ConsumesRequestCondition("!text/plain");
MockHttpServletRequest request = new MockHttpServletRequest();
request.setContentType("01");
assertThat(condition.getMatchingCondition(request)).isNull();
}
@Test // gh-22010
public void consumesNoContent() {
ConsumesRequestCondition condition = new ConsumesRequestCondition("text/plain");
condition.setBodyRequired(false);
MockHttpServletRequest request = new MockHttpServletRequest();
assertThat(condition.getMatchingCondition(request)).isNotNull();
request = new MockHttpServletRequest();
request.addHeader(HttpHeaders.CONTENT_LENGTH, "0");
assertThat(condition.getMatchingCondition(request)).isNotNull();
request = new MockHttpServletRequest();
request.addHeader(HttpHeaders.CONTENT_LENGTH, "21");
assertThat(condition.getMatchingCondition(request)).isNull();
request = new MockHttpServletRequest();
request.addHeader(HttpHeaders.TRANSFER_ENCODING, "chunked");
assertThat(condition.getMatchingCondition(request)).isNull();
}
@Test
void compareToSingle() {
MockHttpServletRequest request = new MockHttpServletRequest();
ConsumesRequestCondition condition1 = new ConsumesRequestCondition("text/plain");
ConsumesRequestCondition condition2 = new ConsumesRequestCondition("text/*");
int result = condition1.compareTo(condition2, request);
assertThat(result).as("Invalid comparison result: " + result).isLessThan(0);
result = condition2.compareTo(condition1, request);
assertThat(result).as("Invalid comparison result: " + result).isGreaterThan(0);
}
@Test
void compareToMultiple() {
MockHttpServletRequest request = new MockHttpServletRequest();
ConsumesRequestCondition condition1 = new ConsumesRequestCondition("*/*", "text/plain");
ConsumesRequestCondition condition2 = new ConsumesRequestCondition("text/*", "text/plain;q=0.7");
int result = condition1.compareTo(condition2, request);
assertThat(result).as("Invalid comparison result: " + result).isLessThan(0);
result = condition2.compareTo(condition1, request);
assertThat(result).as("Invalid comparison result: " + result).isGreaterThan(0);
}
@Test
void combine() {
ConsumesRequestCondition condition1 = new ConsumesRequestCondition("text/plain");
ConsumesRequestCondition condition2 = new ConsumesRequestCondition("application/xml");
ConsumesRequestCondition result = condition1.combine(condition2);
assertThat(result).isEqualTo(condition2);
}
@Test
void combineWithDefault() {
ConsumesRequestCondition condition1 = new ConsumesRequestCondition("text/plain");
ConsumesRequestCondition condition2 = new ConsumesRequestCondition();
ConsumesRequestCondition result = condition1.combine(condition2);
assertThat(result).isEqualTo(condition1);
}
@Test
void parseConsumesAndHeaders() {
String[] consumes = new String[] {"text/plain"};
String[] headers = new String[]{"foo=bar", "content-type=application/xml,application/pdf"};
ConsumesRequestCondition condition = new ConsumesRequestCondition(consumes, headers);
assertConditions(condition, "text/plain", "application/xml", "application/pdf");
}
@Test
void getMatchingCondition() {
MockHttpServletRequest request = new MockHttpServletRequest();
request.setContentType("text/plain");
ConsumesRequestCondition condition = new ConsumesRequestCondition("text/plain", "application/xml");
ConsumesRequestCondition result = condition.getMatchingCondition(request);
assertConditions(result, "text/plain");
condition = new ConsumesRequestCondition("application/xml");
result = condition.getMatchingCondition(request);
assertThat(result).isNull();
}
private void assertConditions(ConsumesRequestCondition condition, String... expected) {
Collection<ConsumeMediaTypeExpression> expressions = condition.getContent();
assertThat(expressions.stream().map(expr -> expr.getMediaType().toString()))
.containsExactlyInAnyOrder(expected);
}
}
| ConsumesRequestConditionTests |
java | spring-projects__spring-framework | spring-oxm/src/main/java/org/springframework/oxm/jaxb/Jaxb2Marshaller.java | {
"start": 33573,
"end": 35158
} | class ____ extends AttachmentMarshaller {
private final MimeContainer mimeContainer;
public Jaxb2AttachmentMarshaller(MimeContainer mimeContainer) {
this.mimeContainer = mimeContainer;
}
@Override
public String addMtomAttachment(byte[] data, int offset, int length, String mimeType,
String elementNamespace, String elementLocalName) {
ByteArrayDataSource dataSource = new ByteArrayDataSource(mimeType, data, offset, length);
return addMtomAttachment(new DataHandler(dataSource), elementNamespace, elementLocalName);
}
@Override
public String addMtomAttachment(DataHandler dataHandler, String elementNamespace, String elementLocalName) {
String host = getHost(elementNamespace, dataHandler);
String contentId = UUID.randomUUID() + "@" + host;
this.mimeContainer.addAttachment("<" + contentId + ">", dataHandler);
contentId = URLEncoder.encode(contentId, StandardCharsets.UTF_8);
return CID + contentId;
}
private String getHost(String elementNamespace, DataHandler dataHandler) {
try {
URI uri = ResourceUtils.toURI(elementNamespace);
return uri.getHost();
}
catch (URISyntaxException ignored) {
}
return dataHandler.getName();
}
@Override
public String addSwaRefAttachment(DataHandler dataHandler) {
String contentId = UUID.randomUUID() + "@" + dataHandler.getName();
this.mimeContainer.addAttachment(contentId, dataHandler);
return contentId;
}
@Override
public boolean isXOPPackage() {
return this.mimeContainer.convertToXopPackage();
}
}
private static | Jaxb2AttachmentMarshaller |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/context/ActiveProfiles.java | {
"start": 3055,
"end": 3392
} | class ____ be appended to the list of bean definition profiles defined by
* a test superclass or enclosing class. Thus, subclasses and nested classes
* have the option of <em>extending</em> the list of bean definition profiles.
* <p>If {@code inheritProfiles} is set to {@code false}, the bean definition
* profiles for the test | will |
java | quarkusio__quarkus | extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/reactive/ReactiveMongoDatabase.java | {
"start": 16287,
"end": 16700
} | class ____ decode each document into
* @param <T> the target document type of the iterable
* @return the stream of change events.
*/
<T> Multi<ChangeStreamDocument<T>> watch(ClientSession clientSession, Class<T> clazz);
/**
* Creates a change stream for this database.
*
* @param clientSession the client session with which to associate this operation
* @param clazz the | to |
java | spring-projects__spring-boot | core/spring-boot-docker-compose/src/test/java/org/springframework/boot/docker/compose/core/DockerCliCommandTests.java | {
"start": 1167,
"end": 5446
} | class ____ {
private static final ComposeVersion COMPOSE_VERSION = ComposeVersion.of("2.31.0");
@Test
void context() {
DockerCliCommand<?> command = new DockerCliCommand.Context();
assertThat(command.getType()).isEqualTo(DockerCliCommand.Type.DOCKER);
assertThat(command.getCommand(COMPOSE_VERSION)).containsExactly("context", "ls", "--format={{ json . }}");
assertThat(command.deserialize("[]")).isInstanceOf(List.class);
}
@Test
void inspect() {
DockerCliCommand<?> command = new DockerCliCommand.Inspect(List.of("123", "345"));
assertThat(command.getType()).isEqualTo(DockerCliCommand.Type.DOCKER);
assertThat(command.getCommand(COMPOSE_VERSION)).containsExactly("inspect", "--format={{ json . }}", "123",
"345");
assertThat(command.deserialize("[]")).isInstanceOf(List.class);
}
@Test
void composeConfig() {
DockerCliCommand<?> command = new DockerCliCommand.ComposeConfig();
assertThat(command.getType()).isEqualTo(DockerCliCommand.Type.DOCKER_COMPOSE);
assertThat(command.getCommand(COMPOSE_VERSION)).containsExactly("config", "--format=json");
assertThat(command.deserialize("{}")).isInstanceOf(DockerCliComposeConfigResponse.class);
}
@Test
void composePs() {
DockerCliCommand<?> command = new DockerCliCommand.ComposePs();
assertThat(command.getType()).isEqualTo(DockerCliCommand.Type.DOCKER_COMPOSE);
assertThat(command.getCommand(COMPOSE_VERSION)).containsExactly("ps", "--orphans=false", "--format=json");
assertThat(command.deserialize("[]")).isInstanceOf(List.class);
}
@Test
void composePsWhenLessThanV224() {
DockerCliCommand<?> command = new DockerCliCommand.ComposePs();
assertThat(command.getType()).isEqualTo(DockerCliCommand.Type.DOCKER_COMPOSE);
assertThat(command.getCommand(ComposeVersion.of("2.23"))).containsExactly("ps", "--format=json");
assertThat(command.deserialize("[]")).isInstanceOf(List.class);
}
@Test
void composeUp() {
DockerCliCommand<?> command = new DockerCliCommand.ComposeUp(LogLevel.INFO, List.of("--renew-anon-volumes"));
assertThat(command.getType()).isEqualTo(DockerCliCommand.Type.DOCKER_COMPOSE);
assertThat(command.getLogLevel()).isEqualTo(LogLevel.INFO);
assertThat(command.getCommand(COMPOSE_VERSION)).containsExactly("up", "--no-color", "--detach", "--wait",
"--renew-anon-volumes");
assertThat(command.deserialize("[]")).isSameAs(None.INSTANCE);
}
@Test
void composeDown() {
DockerCliCommand<?> command = new DockerCliCommand.ComposeDown(Duration.ofSeconds(1),
List.of("--remove-orphans"));
assertThat(command.getType()).isEqualTo(DockerCliCommand.Type.DOCKER_COMPOSE);
assertThat(command.getCommand(COMPOSE_VERSION)).containsExactly("down", "--timeout", "1", "--remove-orphans");
assertThat(command.deserialize("[]")).isSameAs(None.INSTANCE);
}
@Test
void composeStart() {
DockerCliCommand<?> command = new DockerCliCommand.ComposeStart(LogLevel.INFO, List.of("--dry-run"));
assertThat(command.getType()).isEqualTo(DockerCliCommand.Type.DOCKER_COMPOSE);
assertThat(command.getLogLevel()).isEqualTo(LogLevel.INFO);
assertThat(command.getCommand(COMPOSE_VERSION)).containsExactly("start", "--dry-run");
assertThat(command.deserialize("[]")).isSameAs(None.INSTANCE);
}
@Test
void composeStop() {
DockerCliCommand<?> command = new DockerCliCommand.ComposeStop(Duration.ofSeconds(1), List.of("--dry-run"));
assertThat(command.getType()).isEqualTo(DockerCliCommand.Type.DOCKER_COMPOSE);
assertThat(command.getCommand(COMPOSE_VERSION)).containsExactly("stop", "--timeout", "1", "--dry-run");
assertThat(command.deserialize("[]")).isSameAs(None.INSTANCE);
}
@Test
void composeVersionTests() {
ComposeVersion version = ComposeVersion.of("2.31.0-desktop");
assertThat(version.major()).isEqualTo(2);
assertThat(version.minor()).isEqualTo(31);
assertThat(version.isLessThan(1, 0)).isFalse();
assertThat(version.isLessThan(2, 0)).isFalse();
assertThat(version.isLessThan(2, 31)).isFalse();
assertThat(version.isLessThan(2, 32)).isTrue();
assertThat(version.isLessThan(3, 0)).isTrue();
ComposeVersion versionWithPrefix = ComposeVersion.of("v2.31.0-desktop");
assertThat(versionWithPrefix.major()).isEqualTo(2);
assertThat(versionWithPrefix.minor()).isEqualTo(31);
}
}
| DockerCliCommandTests |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/parallel/ParallelFlowable.java | {
"start": 1255,
"end": 1731
} | class ____ parallel publishing of events signaled to an array of {@link Subscriber}s.
* <p>
* Use {@link #from(Publisher)} to start processing a regular {@link Publisher} in 'rails'.
* Use {@link #runOn(Scheduler)} to introduce where each 'rail' should run on thread-vise.
* Use {@link #sequential()} to merge the sources back into a single {@link Flowable}.
*
* <p>History: 2.0.5 - experimental; 2.1 - beta
* @param <T> the value type
* @since 2.2
*/
public abstract | for |
java | quarkusio__quarkus | extensions/oidc-client-filter/deployment/src/test/java/io/quarkus/oidc/client/filter/ProtectedResource.java | {
"start": 291,
"end": 471
} | class ____ {
@Inject
Principal principal;
@GET
@RolesAllowed("user")
public String principalName() {
return principal.getName();
}
}
| ProtectedResource |
java | apache__flink | flink-state-backends/flink-statebackend-forst/src/main/java/org/apache/flink/state/forst/ForStDBTtlCompactFiltersManager.java | {
"start": 10909,
"end": 11323
} | class ____ implements FlinkCompactionFilter.TimeProvider {
private final TtlTimeProvider ttlTimeProvider;
private TimeProviderWrapper(TtlTimeProvider ttlTimeProvider) {
this.ttlTimeProvider = ttlTimeProvider;
}
@Override
public long currentTimestamp() {
return ttlTimeProvider.currentTimestamp();
}
}
private static | TimeProviderWrapper |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAppController.java | {
"start": 5969,
"end": 7005
} | class ____ rendering
*/
@Test
public void testGetJobCounters() {
when(job.checkAccess(any(UserGroupInformation.class), any(JobACL.class)))
.thenReturn(false);
appController.jobCounters();
verify(appController.response()).setContentType(MimeType.TEXT);
assertEquals(
"Access denied: User user does not have permission to view job job_01_01",
appController.getData());
when(job.checkAccess(any(UserGroupInformation.class), any(JobACL.class)))
.thenReturn(true);
appController.getProperty().remove(AMParams.JOB_ID);
appController.jobCounters();
assertEquals(
"Access denied: User user does not have permission to view job job_01_01Bad Request: Missing job ID",
appController.getData());
appController.getProperty().put(AMParams.JOB_ID, "job_01_01");
appController.jobCounters();
assertEquals(CountersPage.class, appController.getClazz());
}
/**
* Test method 'taskCounters'. Should print message about error or set CountersPage | for |
java | apache__flink | flink-connectors/flink-connector-files/src/main/java/org/apache/flink/connector/file/sink/compactor/operator/CompactorOperator.java | {
"start": 12575,
"end": 15394
} | class ____
implements SimpleVersionedSerializer<Map<Long, List<CompactorRequest>>> {
private static final int MAGIC_NUMBER = 0xa946be83;
private final CompactorRequestSerializer requestSerializer;
RemainingRequestsSerializer(CompactorRequestSerializer requestSerializer) {
this.requestSerializer = requestSerializer;
}
@Override
public int getVersion() {
return 1;
}
@Override
public byte[] serialize(Map<Long, List<CompactorRequest>> remainingRequests)
throws IOException {
DataOutputSerializer out = new DataOutputSerializer(256);
out.writeInt(MAGIC_NUMBER);
serializeV1(remainingRequests, out);
return out.getCopyOfBuffer();
}
@Override
public Map<Long, List<CompactorRequest>> deserialize(int version, byte[] serialized)
throws IOException {
DataInputDeserializer in = new DataInputDeserializer(serialized);
switch (version) {
case 1:
validateMagicNumber(in);
return deserializeV1(in);
default:
throw new IOException("Unrecognized version or corrupt state: " + version);
}
}
private void serializeV1(
Map<Long, List<CompactorRequest>> request, DataOutputSerializer out)
throws IOException {
out.writeInt(request.size());
for (Map.Entry<Long, List<CompactorRequest>> e : request.entrySet()) {
out.writeLong(e.getKey());
SimpleVersionedSerialization.writeVersionAndSerializeList(
requestSerializer, e.getValue(), out);
}
}
private Map<Long, List<CompactorRequest>> deserializeV1(DataInputDeserializer in)
throws IOException {
int size = in.readInt();
Map<Long, List<CompactorRequest>> requestMap = new HashMap<>(size);
for (int i = 0; i < size; i++) {
long cpId = in.readLong();
List<CompactorRequest> requests =
SimpleVersionedSerialization.readVersionAndDeserializeList(
requestSerializer, in);
requestMap.put(cpId, requests);
}
return requestMap;
}
private static void validateMagicNumber(DataInputView in) throws IOException {
int magicNumber = in.readInt();
if (magicNumber != MAGIC_NUMBER) {
throw new IOException(
String.format("Corrupt data: Unexpected magic number %08X", magicNumber));
}
}
}
}
| RemainingRequestsSerializer |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/context/ContextLoaderTests.java | {
"start": 18361,
"end": 18480
} | interface ____ extends ConfigurableApplicationContext {
void unheardOf();
}
private static | UnknownApplicationContext |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/factory/aot/BeanDefinitionMethodGeneratorFactoryTests.java | {
"start": 12291,
"end": 12515
} | class ____ implements BeanRegistrationAotProcessor {
@Override
public BeanRegistrationAotContribution processAheadOfTime(RegisteredBean registeredBean) {
return null;
}
}
static | TestBeanRegistrationAotProcessorBean |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/volume/csi/event/ControllerPublishVolumeEvent.java | {
"start": 1009,
"end": 1196
} | class ____ extends VolumeEvent {
public ControllerPublishVolumeEvent(Volume volume) {
super(volume, VolumeEventType.CONTROLLER_PUBLISH_VOLUME_EVENT);
}
}
| ControllerPublishVolumeEvent |
java | apache__hadoop | hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/ZombieCluster.java | {
"start": 1243,
"end": 5276
} | class ____ extends AbstractClusterStory {
private Node root;
/**
* Construct a homogeneous cluster. We assume that the leaves on the topology
* are {@link MachineNode}s, and the parents of {@link MachineNode}s are
* {@link RackNode}s. We also expect all leaf nodes are on the same level.
*
* @param topology
* The network topology.
* @param defaultNode
* The default node setting.
*/
public ZombieCluster(LoggedNetworkTopology topology, MachineNode defaultNode) {
buildCluster(topology, defaultNode);
}
/**
* Construct a homogeneous cluster. We assume that the leaves on the topology
* are {@link MachineNode}s, and the parents of {@link MachineNode}s are
* {@link RackNode}s. We also expect all leaf nodes are on the same level.
*
* @param path Path to the JSON-encoded topology file.
* @param conf
* @param defaultNode
* The default node setting.
* @throws IOException
*/
public ZombieCluster(Path path, MachineNode defaultNode, Configuration conf) throws IOException {
this(new ClusterTopologyReader(path, conf).get(), defaultNode);
}
/**
* Construct a homogeneous cluster. We assume that the leaves on the topology
* are {@link MachineNode}s, and the parents of {@link MachineNode}s are
* {@link RackNode}s. We also expect all leaf nodes are on the same level.
*
* @param input The input stream for the JSON-encoded topology file.
* @param defaultNode
* The default node setting.
* @throws IOException
*/
public ZombieCluster(InputStream input, MachineNode defaultNode) throws IOException {
this(new ClusterTopologyReader(input).get(), defaultNode);
}
@Override
public Node getClusterTopology() {
return root;
}
private final void buildCluster(LoggedNetworkTopology topology,
MachineNode defaultNode) {
Map<LoggedNetworkTopology, Integer> levelMapping =
new IdentityHashMap<LoggedNetworkTopology, Integer>();
Deque<LoggedNetworkTopology> unvisited =
new ArrayDeque<LoggedNetworkTopology>();
unvisited.add(topology);
levelMapping.put(topology, 0);
// building levelMapping and determine leafLevel
int leafLevel = -1; // -1 means leafLevel unknown.
for (LoggedNetworkTopology n = unvisited.poll(); n != null;
n = unvisited.poll()) {
int level = levelMapping.get(n);
List<LoggedNetworkTopology> children = n.getChildren();
if (children == null || children.isEmpty()) {
if (leafLevel == -1) {
leafLevel = level;
} else if (leafLevel != level) {
throw new IllegalArgumentException(
"Leaf nodes are not on the same level");
}
} else {
for (LoggedNetworkTopology child : children) {
levelMapping.put(child, level + 1);
unvisited.addFirst(child);
}
}
}
/**
* A second-pass dfs traverse of topology tree. path[i] contains the parent
* of the node at level i+1.
*/
Node[] path = new Node[leafLevel];
unvisited.add(topology);
for (LoggedNetworkTopology n = unvisited.poll(); n != null;
n = unvisited.poll()) {
int level = levelMapping.get(n);
Node current;
if (level == leafLevel) { // a machine node
MachineNode.Builder builder =
new MachineNode.Builder(n.getName().getValue(), level);
if (defaultNode != null) {
builder.cloneFrom(defaultNode);
}
current = builder.build();
} else {
current = (level == leafLevel - 1)
? new RackNode(n.getName().getValue(), level) :
new Node(n.getName().getValue(), level);
path[level] = current;
// Add all children to the front of the queue.
for (LoggedNetworkTopology child : n.getChildren()) {
unvisited.addFirst(child);
}
}
if (level != 0) {
path[level - 1].addChild(current);
}
}
root = path[0];
}
}
| ZombieCluster |
java | quarkusio__quarkus | extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/tracing/intrumentation/vertx/OpenTelemetryVertxTracer.java | {
"start": 474,
"end": 3395
} | class ____
implements VertxTracer<OpenTelemetryVertxTracer.SpanOperation, OpenTelemetryVertxTracer.SpanOperation> {
private final List<InstrumenterVertxTracer<?, ?>> instrumenterVertxTracers;
public OpenTelemetryVertxTracer(final List<InstrumenterVertxTracer<?, ?>> instrumenterVertxTracers) {
this.instrumenterVertxTracers = Collections.unmodifiableList(instrumenterVertxTracers);
}
@Override
public <R> SpanOperation receiveRequest(
final Context context,
final SpanKind kind,
final TracingPolicy policy,
final R request,
final String operation,
final Iterable<Map.Entry<String, String>> headers,
final TagExtractor<R> tagExtractor) {
return getTracer(request, tagExtractor).receiveRequest(context, kind, policy, request, operation, headers,
tagExtractor);
}
@Override
public <R> void sendResponse(
final Context context,
final R response,
final SpanOperation spanOperation,
final Throwable failure,
final TagExtractor<R> tagExtractor) {
getTracer(spanOperation, tagExtractor).sendResponse(context, response, spanOperation, failure, tagExtractor);
}
@Override
public <R> SpanOperation sendRequest(
final Context context,
final SpanKind kind,
final TracingPolicy policy,
final R request,
final String operation,
final BiConsumer<String, String> headers,
final TagExtractor<R> tagExtractor) {
return getTracer(request, tagExtractor).sendRequest(context, kind, policy, request, operation, headers, tagExtractor);
}
@Override
public <R> void receiveResponse(
final Context context,
final R response,
final SpanOperation spanOperation,
final Throwable failure,
final TagExtractor<R> tagExtractor) {
getTracer(spanOperation, tagExtractor).receiveResponse(context, response, spanOperation, failure, tagExtractor);
}
@SuppressWarnings("unchecked")
private <R> VertxTracer<SpanOperation, SpanOperation> getTracer(
final R request,
final TagExtractor<R> tagExtractor) {
for (InstrumenterVertxTracer<?, ?> instrumenterVertxTracer : instrumenterVertxTracers) {
if (instrumenterVertxTracer.canHandle(request, tagExtractor)) {
return instrumenterVertxTracer;
}
}
return NOOP;
}
@SuppressWarnings("unchecked")
private <R> VertxTracer<SpanOperation, SpanOperation> getTracer(final SpanOperation spanOperation,
final TagExtractor<R> tagExtractor) {
return spanOperation != null ? getTracer((R) spanOperation.getRequest(), tagExtractor) : NOOP;
}
static | OpenTelemetryVertxTracer |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/comparable/AbstractUniversalComparableAssert_isGreaterThan_Test.java | {
"start": 829,
"end": 1228
} | class ____ extends AbstractUniversalComparableAssertBaseTest {
@Override
protected UniversalComparableAssert<String> invoke_api_method() {
return assertions.isGreaterThan("bcd");
}
@Override
protected void verify_internal_effects() {
verify(comparables).assertGreaterThan(getInfo(assertions), getActual(assertions), "bcd");
}
}
| AbstractUniversalComparableAssert_isGreaterThan_Test |
java | apache__camel | core/camel-xml-jaxp/src/main/java/org/apache/camel/support/processor/validation/NoXmlBodyValidationException.java | {
"start": 1051,
"end": 1509
} | class ____ extends ValidationException {
private static final @Serial long serialVersionUID = 4502520681354358599L;
public NoXmlBodyValidationException(Exchange exchange) {
super(exchange, "No XML body could be found on the input message");
}
public NoXmlBodyValidationException(Exchange exchange, Throwable cause) {
super("No XML body could be found on the input message", exchange, cause);
}
}
| NoXmlBodyValidationException |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/parser/BigStringFieldTest_private.java | {
"start": 318,
"end": 3224
} | class ____ extends TestCase {
public void test_bigFieldString() throws Exception {
Model model = new Model();
model.f0 = random(1024);
model.f1 = random(1024);
model.f2 = random(1024);
model.f3 = random(1024);
model.f4 = random(1024);
String text = JSON.toJSONString(model);
Model model2 = JSON.parseObject(text, Model.class);
assertEquals(model2.f0, model.f0);
assertEquals(model2.f1, model.f1);
assertEquals(model2.f2, model.f2);
assertEquals(model2.f3, model.f3);
assertEquals(model2.f4, model.f4);
}
public void test_list() throws Exception {
List<Model> list = new ArrayList<Model>();
for (int i = 0; i < 1000; ++i) {
Model model = new Model();
model.f0 = random(64);
model.f1 = random(64);
model.f2 = random(64);
model.f3 = random(64);
model.f4 = random(64);
list.add(model);
}
String text = JSON.toJSONString(list);
List<Model> list2 = JSON.parseObject(text, new TypeReference<List<Model>>() {});
assertEquals(list.size(), list2.size());
for (int i = 0; i < 1000; ++i) {
assertEquals(list.get(i).f0, list2.get(i).f0);
assertEquals(list.get(i).f1, list2.get(i).f1);
assertEquals(list.get(i).f2, list2.get(i).f2);
assertEquals(list.get(i).f3, list2.get(i).f3);
assertEquals(list.get(i).f4, list2.get(i).f4);
}
}
public void test_list_browserSecure() throws Exception {
List<Model> list = new ArrayList<Model>();
for (int i = 0; i < 1000; ++i) {
Model model = new Model();
model.f0 = random(64);
model.f1 = random(64);
model.f2 = random(64);
model.f3 = random(64);
model.f4 = random(64);
list.add(model);
}
String text = JSON.toJSONString(list, SerializerFeature.BrowserSecure);
List<Model> list2 = JSON.parseObject(text, new TypeReference<List<Model>>() {});
assertEquals(list.size(), list2.size());
for (int i = 0; i < 1000; ++i) {
assertEquals(list.get(i).f0, list2.get(i).f0);
assertEquals(list.get(i).f1, list2.get(i).f1);
assertEquals(list.get(i).f2, list2.get(i).f2);
assertEquals(list.get(i).f3, list2.get(i).f3);
assertEquals(list.get(i).f4, list2.get(i).f4);
}
}
public String random(int count) {
Random random = new Random();
char[] chars = new char[count];
for (int i = 0; i < count; ++i) {
chars[i] = (char) random.nextInt();
}
return new String(chars);
}
private static | BigStringFieldTest_private |
java | spring-projects__spring-boot | build-plugin/spring-boot-maven-plugin/src/main/java/org/springframework/boot/maven/AbstractAotMojo.java | {
"start": 7096,
"end": 7902
} | class ____ implements DiagnosticListener<JavaFileObject> {
private final StringBuilder message = new StringBuilder();
@Override
public void report(Diagnostic<? extends JavaFileObject> diagnostic) {
if (diagnostic.getKind() == Diagnostic.Kind.ERROR) {
this.message.append("\n");
this.message.append(diagnostic.getMessage(Locale.getDefault()));
if (diagnostic.getSource() != null) {
this.message.append(" ");
this.message.append(diagnostic.getSource().getName());
this.message.append(" ");
this.message.append(diagnostic.getLineNumber()).append(":").append(diagnostic.getColumnNumber());
}
}
}
boolean hasReportedErrors() {
return !this.message.isEmpty();
}
@Override
public String toString() {
return this.message.toString();
}
}
}
| Errors |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/common/util/concurrent/EsThreadPoolExecutorTests.java | {
"start": 5280,
"end": 6054
} | class ____ extends EsThreadPoolExecutor {
final AtomicReference<Exception> lastLoggedException = new AtomicReference<>();
ThrowingEsThreadPoolExecutor(String name, int corePoolSize, int maximumPoolSize, RuntimeException exception) {
super(name, corePoolSize, maximumPoolSize, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>() {
@Override
public boolean offer(Runnable r) {
throw exception;
}
}, TestEsExecutors.testOnlyDaemonThreadFactory("test"), new ThreadContext(Settings.EMPTY));
}
@Override
void logException(AbstractRunnable task, Exception e) {
lastLoggedException.set(e);
}
}
}
| ThrowingEsThreadPoolExecutor |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/injection/guice/InjectorImpl.java | {
"start": 18694,
"end": 24706
} | class ____ {
final Map<TypeLiteral<?>, List<Binding<?>>> multimap = new HashMap<>();
<T> void put(TypeLiteral<T> type, Binding<T> binding) {
multimap.computeIfAbsent(type, k -> new ArrayList<>()).add(binding);
}
@SuppressWarnings("unchecked")
// safe because we only put matching entries into the map
<T> List<Binding<T>> getAll(TypeLiteral<T> type) {
List<Binding<?>> bindings = multimap.get(type);
return bindings != null ? Collections.<Binding<T>>unmodifiableList((List) multimap.get(type)) : Collections.emptyList();
}
}
/**
* Returns parameter injectors, or {@code null} if there are no parameters.
*/
SingleParameterInjector<?>[] getParametersInjectors(List<Dependency<?>> parameters, Errors errors) throws ErrorsException {
if (parameters.isEmpty()) {
return null;
}
int numErrorsBefore = errors.size();
SingleParameterInjector<?>[] result = new SingleParameterInjector<?>[parameters.size()];
int i = 0;
for (Dependency<?> parameter : parameters) {
try {
result[i++] = createParameterInjector(parameter, errors.withSource(parameter));
} catch (ErrorsException rethrownBelow) {
// rethrown below
}
}
errors.throwIfNewErrors(numErrorsBefore);
return result;
}
<T> SingleParameterInjector<T> createParameterInjector(final Dependency<T> dependency, final Errors errors) throws ErrorsException {
InternalFactory<? extends T> factory = getInternalFactory(dependency.getKey(), errors);
return new SingleParameterInjector<>(dependency, factory);
}
/**
* Cached constructor injectors for each type
*/
ConstructorInjectorStore constructors = new ConstructorInjectorStore(this);
/**
* Cached field and method injectors for each type.
*/
MembersInjectorStore membersInjectorStore;
private <T> Provider<T> getProvider(Class<T> type) {
return getProvider(Key.get(type));
}
<T> Provider<T> getProviderOrThrow(final Key<T> key, Errors errors) throws ErrorsException {
final InternalFactory<? extends T> factory = getInternalFactory(key, errors);
// ES: optimize for a common case of read only instance getting from the parent...
if (factory instanceof InternalFactory.Instance) {
return () -> {
try {
return factory.get(null, null, null);
} catch (ErrorsException e) {
// ignore
}
// should never happen...
assert false;
return null;
};
}
final Dependency<T> dependency = Dependency.get(key);
return new Provider<>() {
@Override
public T get() {
final Errors errors = new Errors(dependency);
try {
T t = callInContext((ContextualCallable<T>) context -> {
context.setDependency(dependency);
try {
return factory.get(errors, context, dependency);
} finally {
context.setDependency(null);
}
});
errors.throwIfNewErrors(0);
return t;
} catch (ErrorsException e) {
throw new ProvisionException(errors.merge(e.getErrors()).getMessages());
}
}
@Override
public String toString() {
return factory.toString();
}
};
}
@Override
public <T> Provider<T> getProvider(final Key<T> key) {
Errors errors = new Errors(key);
try {
Provider<T> result = getProviderOrThrow(key, errors);
errors.throwIfNewErrors(0);
return result;
} catch (ErrorsException e) {
throw new ConfigurationException(errors.merge(e.getErrors()).getMessages());
}
}
@Override
public <T> T getInstance(Key<T> key) {
return getProvider(key).get();
}
@Override
public <T> T getInstance(Class<T> type) {
return getProvider(type).get();
}
private final ThreadLocal<Object[]> localContext;
/**
* Looks up thread local context. Creates (and removes) a new context if necessary.
*/
<T> T callInContext(ContextualCallable<T> callable) throws ErrorsException {
Object[] reference = localContext.get();
if (reference == null) {
reference = new Object[1];
localContext.set(reference);
}
if (reference[0] == null) {
reference[0] = new InternalContext();
try {
return callable.call((InternalContext) reference[0]);
} finally {
// Only clear the context if this call created it.
reference[0] = null;
}
} else {
// Someone else will clean up this context.
return callable.call((InternalContext) reference[0]);
}
}
@Override
public String toString() {
return new ToStringBuilder(Injector.class).add("bindings", state.getExplicitBindingsThisLevel().values()).toString();
}
// ES_GUICE: clear caches
public void clearCache() {
state.clearBlacklisted();
constructors = new ConstructorInjectorStore(this);
membersInjectorStore = new MembersInjectorStore(this);
jitBindings = new HashMap<>();
}
// ES_GUICE: make all registered bindings act as eager singletons
public void readOnlyAllSingletons() {
state.makeAllBindingsToEagerSingletons(this);
bindingsMultimap = new BindingsMultimap();
// reindex the bindings
index();
}
}
| BindingsMultimap |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/tuple/entity/EntityMetamodel.java | {
"start": 29717,
"end": 31452
} | class ____ which to resolve the entity-name.
// * @return The mapped entity-name, or null if no such mapping was found.
// */
// public String findEntityNameByEntityClass(Class<?> inheritanceClass) {
// return entityNameByInheritanceClassMap.get( inheritanceClass );
// }
@Override
public String toString() {
return "EntityMetamodel(" + name + ':' + ArrayHelper.toString( properties ) + ')';
}
// temporary ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
public String[] getPropertyNames() {
return propertyNames;
}
public Type[] getPropertyTypes() {
return propertyTypes;
}
public @Nullable Type[] getDirtyCheckablePropertyTypes() {
return dirtyCheckablePropertyTypes;
}
public boolean[] getPropertyLaziness() {
return propertyLaziness;
}
public boolean[] getPropertyUpdateability() {
return propertyUpdateability;
}
public boolean[] getPropertyCheckability() {
return propertyCheckability;
}
public boolean[] getNonlazyPropertyUpdateability() {
return nonlazyPropertyUpdateability;
}
public boolean[] getPropertyInsertability() {
return propertyInsertability;
}
public boolean[] getPropertyNullability() {
return propertyNullability;
}
public boolean[] getPropertyVersionability() {
return propertyVersionability;
}
public CascadeStyle[] getCascadeStyles() {
return cascadeStyles;
}
public boolean hasPreInsertGeneratedValues() {
return hasPreInsertGeneratedValues;
}
public boolean hasPreUpdateGeneratedValues() {
return hasPreUpdateGeneratedValues;
}
public boolean hasInsertGeneratedValues() {
return hasInsertGeneratedValues;
}
public boolean hasUpdateGeneratedValues() {
return hasUpdateGeneratedValues;
}
/**
* Whether this | for |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/jobgraph/ExecutionPlanUtils.java | {
"start": 1389,
"end": 4215
} | enum ____ {
;
private static final Logger LOG = LoggerFactory.getLogger(ExecutionPlanUtils.class);
public static Map<String, DistributedCache.DistributedCacheEntry> prepareUserArtifactEntries(
Map<String, DistributedCache.DistributedCacheEntry> userArtifacts, JobID jobId) {
final Map<String, DistributedCache.DistributedCacheEntry> result = new HashMap<>();
if (userArtifacts != null && !userArtifacts.isEmpty()) {
try {
java.nio.file.Path tmpDir =
Files.createTempDirectory("flink-distributed-cache-" + jobId);
for (Map.Entry<String, DistributedCache.DistributedCacheEntry> originalEntry :
userArtifacts.entrySet()) {
Path filePath = new Path(originalEntry.getValue().filePath);
boolean isLocalDir = false;
try {
FileSystem sourceFs = filePath.getFileSystem();
isLocalDir =
!sourceFs.isDistributedFS()
&& sourceFs.getFileStatus(filePath).isDir();
} catch (IOException ioe) {
LOG.warn(
"Could not determine whether {} denotes a local path.",
filePath,
ioe);
}
// zip local directories because we only support file uploads
DistributedCache.DistributedCacheEntry entry;
if (isLocalDir) {
Path zip =
FileUtils.compressDirectory(
filePath,
new Path(tmpDir.toString(), filePath.getName() + ".zip"));
entry =
new DistributedCache.DistributedCacheEntry(
zip.toString(),
originalEntry.getValue().isExecutable,
true);
} else {
entry =
new DistributedCache.DistributedCacheEntry(
filePath.toString(),
originalEntry.getValue().isExecutable,
false);
}
result.put(originalEntry.getKey(), entry);
}
} catch (IOException ioe) {
throw new FlinkRuntimeException(
"Could not compress distributed-cache artifacts.", ioe);
}
}
return result;
}
}
| ExecutionPlanUtils |
java | netty__netty | transport/src/test/java/io/netty/channel/SingleThreadEventLoopTest.java | {
"start": 19130,
"end": 19977
} | class ____ extends SingleThreadEventLoop {
SingleThreadEventLoopB() {
super(null, Executors.defaultThreadFactory(), false);
}
@Override
protected void run() {
for (;;) {
try {
Thread.sleep(TimeUnit.NANOSECONDS.toMillis(delayNanos(System.nanoTime())));
} catch (InterruptedException e) {
// Waken up by interruptThread()
}
runTasks0();
if (confirmShutdown()) {
break;
}
}
}
protected void runTasks0() {
runAllTasks();
}
@Override
protected void wakeup(boolean inEventLoop) {
interruptThread();
}
}
private static final | SingleThreadEventLoopB |
java | apache__dubbo | dubbo-metrics/dubbo-metrics-default/src/test/java/org/apache/dubbo/metrics/collector/DefaultCollectorTest.java | {
"start": 3326,
"end": 14827
} | class ____ {
private ApplicationModel applicationModel;
private String interfaceName;
private String methodName;
private String group;
private String version;
private RpcInvocation invocation;
private String side;
MetricsDispatcher metricsDispatcher;
DefaultMetricsCollector defaultCollector;
MetricsFilter metricsFilter;
@BeforeEach
public void setup() {
FrameworkModel frameworkModel = FrameworkModel.defaultModel();
applicationModel = frameworkModel.newApplication();
ApplicationConfig config = new ApplicationConfig();
config.setName("MockMetrics");
MetricsConfig metricsConfig = new MetricsConfig();
metricsConfig.setEnableRpc(true);
applicationModel.getApplicationConfigManager().setApplication(config);
applicationModel.getApplicationConfigManager().setMetrics(metricsConfig);
metricsDispatcher = applicationModel.getBeanFactory().getOrRegisterBean(MetricsDispatcher.class);
defaultCollector = applicationModel.getBeanFactory().getBean(DefaultMetricsCollector.class);
defaultCollector.setCollectEnabled(true);
interfaceName = "org.apache.dubbo.MockInterface";
methodName = "mockMethod";
group = "mockGroup";
version = "1.0.0";
invocation = new RpcInvocation(methodName, interfaceName, "serviceKey", null, null);
invocation.setTargetServiceUniqueName(group + "/" + interfaceName + ":" + version);
invocation.setAttachment(GROUP_KEY, group);
invocation.setAttachment(VERSION_KEY, version);
side = CommonConstants.CONSUMER;
invocation.setInvoker(new TestMetricsInvoker(side));
invocation.setTargetServiceUniqueName(group + "/" + interfaceName + ":" + version);
RpcContext.getServiceContext()
.setUrl(URL.valueOf("test://test:11/test?accesslog=true&group=dubbo&version=1.1&side=" + side));
metricsFilter = new MetricsFilter();
metricsFilter.setApplicationModel(applicationModel);
}
@Test
void testListener() {
DefaultMetricsCollector metricsCollector = new DefaultMetricsCollector(applicationModel);
RequestEvent event = RequestEvent.toRequestEvent(
applicationModel,
null,
null,
null,
invocation,
MetricsSupport.getSide(invocation),
MethodMetric.isServiceLevel(applicationModel));
RequestEvent beforeEvent = RequestEvent.toRequestErrorEvent(
applicationModel,
null,
null,
invocation,
MetricsSupport.getSide(invocation),
RpcException.FORBIDDEN_EXCEPTION,
MethodMetric.isServiceLevel(applicationModel));
Assertions.assertTrue(metricsCollector.isSupport(event));
Assertions.assertTrue(metricsCollector.isSupport(beforeEvent));
}
@AfterEach
public void teardown() {
applicationModel.destroy();
}
/**
* No rt metrics because Aggregate calc
*/
@Test
void testRequestEventNoRt() {
applicationModel.getBeanFactory().getOrRegisterBean(MetricsDispatcher.class);
DefaultMetricsCollector collector =
applicationModel.getBeanFactory().getOrRegisterBean(DefaultMetricsCollector.class);
collector.setCollectEnabled(true);
ErrorTypeAwareLogger logger = LoggerFactory.getErrorTypeAwareLogger(DefaultCollectorTest.class);
logger.warn("0-99", "", "", "Test error code message.");
metricsFilter.invoke(new TestMetricsInvoker(side), invocation);
try {
Thread.sleep(50);
} catch (InterruptedException e) {
e.printStackTrace();
}
AppResponse mockRpcResult = new AppResponse();
// mockRpcResult.setException(new RpcException("hessian"));
Result result = AsyncRpcResult.newDefaultAsyncResult(mockRpcResult, invocation);
metricsFilter.onResponse(result, new TestMetricsInvoker(side), invocation);
RequestEvent eventObj = (RequestEvent) invocation.get(METRIC_FILTER_EVENT);
long c1 = eventObj.getTimePair().calc();
// push finish rt +1
List<MetricSample> metricSamples = collector.collect();
// num(total+success+processing) + rt(5) + error code = 9
Assertions.assertEquals(metricSamples.size(), 9);
List<String> metricsNames =
metricSamples.stream().map(MetricSample::getName).collect(Collectors.toList());
// No error will contain total+success+processing
String REQUESTS =
new MetricsKeyWrapper(METRIC_REQUESTS, MetricsPlaceValue.of(side, MetricsLevel.SERVICE)).targetKey();
String SUCCEED = new MetricsKeyWrapper(
METRIC_REQUESTS_SUCCEED, MetricsPlaceValue.of(side, MetricsLevel.SERVICE))
.targetKey();
String PROCESSING = new MetricsKeyWrapper(
METRIC_REQUESTS_PROCESSING, MetricsPlaceValue.of(side, MetricsLevel.SERVICE))
.targetKey();
Assertions.assertTrue(metricsNames.contains(REQUESTS));
Assertions.assertTrue(metricsNames.contains(SUCCEED));
Assertions.assertTrue(metricsNames.contains(PROCESSING));
for (MetricSample metricSample : metricSamples) {
if (metricSample instanceof GaugeMetricSample) {
GaugeMetricSample<?> gaugeMetricSample = (GaugeMetricSample<?>) metricSample;
Object objVal = gaugeMetricSample.getValue();
if (objVal instanceof Map) {
Map<ServiceKeyMetric, AtomicLong> value = (Map<ServiceKeyMetric, AtomicLong>) objVal;
if (metricSample.getName().equals(REQUESTS)) {
Assertions.assertTrue(
value.values().stream().allMatch(atomicLong -> atomicLong.intValue() == 1));
}
if (metricSample.getName().equals(PROCESSING)) {
Assertions.assertTrue(
value.values().stream().allMatch(atomicLong -> atomicLong.intValue() == 0));
}
}
} else {
AtomicLong value = (AtomicLong) ((CounterMetricSample<?>) metricSample).getValue();
if (metricSample.getName().equals(SUCCEED)) {
Assertions.assertEquals(1, value.intValue());
}
}
}
metricsFilter.invoke(new TestMetricsInvoker(side), invocation);
try {
Thread.sleep(50);
} catch (InterruptedException e) {
e.printStackTrace();
}
metricsFilter.onError(
new RpcException(RpcException.TIMEOUT_EXCEPTION, "timeout"), new TestMetricsInvoker(side), invocation);
eventObj = (RequestEvent) invocation.get(METRIC_FILTER_EVENT);
long c2 = eventObj.getTimePair().calc();
metricSamples = collector.collect();
// num(total+success+error+total_error+processing) + rt(5) + error code = 11
Assertions.assertEquals(11, metricSamples.size());
String TIMEOUT = new MetricsKeyWrapper(
METRIC_REQUESTS_TIMEOUT, MetricsPlaceValue.of(side, MetricsLevel.SERVICE))
.targetKey();
String TOTAL_FAILED = new MetricsKeyWrapper(
METRIC_REQUESTS_TOTAL_FAILED, MetricsPlaceValue.of(side, MetricsLevel.SERVICE))
.targetKey();
for (MetricSample metricSample : metricSamples) {
if (metricSample instanceof GaugeMetricSample) {
GaugeMetricSample<?> gaugeMetricSample = (GaugeMetricSample<?>) metricSample;
Object objVal = gaugeMetricSample.getValue();
if (objVal instanceof Map) {
Map<ServiceKeyMetric, AtomicLong> value =
(Map<ServiceKeyMetric, AtomicLong>) ((GaugeMetricSample<?>) metricSample).getValue();
if (metricSample.getName().equals(REQUESTS)) {
Assertions.assertTrue(
value.values().stream().allMatch(atomicLong -> atomicLong.intValue() == 2));
}
if (metricSample.getName().equals(REQUESTS)) {
Assertions.assertTrue(
value.values().stream().allMatch(atomicLong -> atomicLong.intValue() == 2));
}
if (metricSample.getName().equals(PROCESSING)) {
Assertions.assertTrue(
value.values().stream().allMatch(atomicLong -> atomicLong.intValue() == 0));
}
if (metricSample.getName().equals(TIMEOUT)) {
Assertions.assertTrue(
value.values().stream().allMatch(atomicLong -> atomicLong.intValue() == 1));
}
if (metricSample.getName().equals(TOTAL_FAILED)) {
Assertions.assertTrue(
value.values().stream().allMatch(atomicLong -> atomicLong.intValue() == 1));
}
}
} else {
AtomicLong value = (AtomicLong) ((CounterMetricSample<?>) metricSample).getValue();
if (metricSample.getName().equals(SUCCEED)) {
Assertions.assertEquals(1, value.intValue());
}
}
}
// calc rt
for (MetricSample sample : metricSamples) {
Map<String, String> tags = sample.getTags();
Assertions.assertEquals(tags.get(TAG_APPLICATION_NAME), applicationModel.getApplicationName());
}
Map<String, Long> sampleMap = metricSamples.stream()
.filter(metricSample -> metricSample instanceof GaugeMetricSample)
.collect(Collectors.toMap(MetricSample::getName, k -> ((GaugeMetricSample) k).applyAsLong()));
Assertions.assertEquals(
sampleMap.get(new MetricsKeyWrapper(
MetricsKey.METRIC_RT_LAST, MetricsPlaceValue.of(side, MetricsLevel.SERVICE))
.targetKey()),
c2);
Assertions.assertEquals(
sampleMap.get(new MetricsKeyWrapper(
MetricsKey.METRIC_RT_MIN, MetricsPlaceValue.of(side, MetricsLevel.SERVICE))
.targetKey()),
Math.min(c1, c2));
Assertions.assertEquals(
sampleMap.get(new MetricsKeyWrapper(
MetricsKey.METRIC_RT_MAX, MetricsPlaceValue.of(side, MetricsLevel.SERVICE))
.targetKey()),
Math.max(c1, c2));
Assertions.assertEquals(
sampleMap.get(new MetricsKeyWrapper(
MetricsKey.METRIC_RT_AVG, MetricsPlaceValue.of(side, MetricsLevel.SERVICE))
.targetKey()),
(c1 + c2) / 2);
Assertions.assertEquals(
sampleMap.get(new MetricsKeyWrapper(
MetricsKey.METRIC_RT_SUM, MetricsPlaceValue.of(side, MetricsLevel.SERVICE))
.targetKey()),
c1 + c2);
}
}
| DefaultCollectorTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/sql/internal/NativeQueryImpl.java | {
"start": 6883,
"end": 14724
} | class ____<R>
extends AbstractQuery<R>
implements NativeQueryImplementor<R>, DomainQueryExecutionContext, ResultSetMappingResolutionContext {
private final String sqlString;
private final String originalSqlString;
private final ParameterMetadataImplementor parameterMetadata;
private final List<ParameterOccurrence> parameterOccurrences;
private final QueryParameterBindings parameterBindings;
private final Class<R> resultType;
private final ResultSetMapping resultSetMapping;
private final boolean resultMappingSuppliedToCtor;
private final HashMap<String, EntityMappingType> entityMappingTypeByTableAlias = new HashMap<>();
private final QueryOptionsImpl queryOptions = new QueryOptionsImpl();
private Boolean startsWithSelect;
private Set<String> querySpaces;
private Callback callback;
/**
* Constructs a {@code NativeQueryImpl} given a SQL query defined in the mappings.
* Used by Hibernate Reactive.
*/
@SuppressWarnings("unused")
public NativeQueryImpl(NamedNativeQueryMemento<?> memento, SharedSessionContractImplementor session) {
this(
memento,
() -> buildResultSetMapping( getResultSetMappingName( memento ), false, session ),
(resultSetMapping, querySpaceConsumer, context ) -> {
if ( memento.getResultMappingName() != null ) {
final var resultSetMappingMemento =
getNamedObjectRepository( session )
.getResultSetMappingMemento( memento.getResultMappingName() );
if ( resultSetMappingMemento != null ) {
resultSetMappingMemento.resolve( resultSetMapping, querySpaceConsumer, context );
return true;
}
}
if ( memento.getResultType() != null ) {
resultSetMapping.addResultBuilder( resultClassBuilder( memento.getResultType(), context ) );
return true;
}
else {
return false;
}
},
null,
session
);
}
/**
* Constructs a {@code NativeQueryImpl} given a SQL query defined in the mappings.
*/
public NativeQueryImpl(
NamedNativeQueryMemento<?> memento,
Class<R> resultJavaType,
SharedSessionContractImplementor session) {
this(
memento,
() -> {
final String mappingIdentifier = resultJavaType != null ? resultJavaType.getName() : null;
return buildResultSetMapping( mappingIdentifier, false, session );
},
(resultSetMapping, querySpaceConsumer, context) -> {
if ( memento.getResultMappingName() != null ) {
final var resultSetMappingMemento =
getNamedObjectRepository( session )
.getResultSetMappingMemento( memento.getResultMappingName() );
if ( resultSetMappingMemento != null ) {
resultSetMappingMemento.resolve( resultSetMapping, querySpaceConsumer, context );
return true;
}
}
if ( memento.getResultType() != null ) {
resultSetMapping.addResultBuilder( resultClassBuilder( memento.getResultType(), context ) );
return true;
}
else {
return false;
}
},
resultJavaType,
session
);
}
/**
* Constructs a {@code NativeQueryImpl} given a SQL query defined in the mappings.
*/
public NativeQueryImpl(
NamedNativeQueryMemento<?> memento,
String resultSetMappingName,
SharedSessionContractImplementor session) {
this(
memento,
() -> buildResultSetMapping( resultSetMappingName, false, session ),
(resultSetMapping, querySpaceConsumer, context) -> {
final var mappingMemento =
getNamedObjectRepository( session )
.getResultSetMappingMemento( resultSetMappingName );
assert mappingMemento != null;
mappingMemento.resolve( resultSetMapping, querySpaceConsumer, context );
return true;
},
null,
session
);
}
private NativeQueryImpl(
NamedNativeQueryMemento<?> memento,
Supplier<ResultSetMapping> resultSetMappingCreator,
ResultSetMappingHandler resultSetMappingHandler,
@Nullable Class<R> resultClass,
SharedSessionContractImplementor session) {
super( session );
originalSqlString = memento.getOriginalSqlString();
querySpaces = new HashSet<>();
final var parameterInterpretation = resolveParameterInterpretation( originalSqlString, session );
sqlString = parameterInterpretation.getAdjustedSqlString();
parameterMetadata = parameterInterpretation.toParameterMetadata( session );
parameterOccurrences = parameterInterpretation.getOrderedParameterOccurrences();
parameterBindings = parameterMetadata.createBindings( session.getFactory() );
resultSetMapping = resultSetMappingCreator.get();
resultMappingSuppliedToCtor =
resultSetMappingHandler.resolveResultSetMapping( resultSetMapping, querySpaces::add, this );
resultType = resultClass;
handleExplicitResultSetMapping();
applyOptions( memento );
}
public NativeQueryImpl(
String sql,
NamedResultSetMappingMemento resultSetMappingMemento,
Class<R> resultClass,
SharedSessionContractImplementor session) {
super( session );
originalSqlString = sql;
querySpaces = new HashSet<>();
final var parameterInterpretation = resolveParameterInterpretation( sql, session );
sqlString = parameterInterpretation.getAdjustedSqlString();
parameterMetadata = parameterInterpretation.toParameterMetadata( session );
parameterOccurrences = parameterInterpretation.getOrderedParameterOccurrences();
parameterBindings = parameterMetadata.createBindings( session.getFactory() );
resultSetMapping = buildResultSetMapping( resultSetMappingMemento.getName(), false, session );
resultSetMappingMemento.resolve( resultSetMapping, this::addSynchronizedQuerySpace, this );
resultMappingSuppliedToCtor = true;
resultType = resultClass;
handleExplicitResultSetMapping();
}
public NativeQueryImpl(String sql, @Nullable Class<R> resultClass, SharedSessionContractImplementor session) {
super( session );
originalSqlString = sql;
querySpaces = new HashSet<>();
final var parameterInterpretation = resolveParameterInterpretation( sql, session );
sqlString = parameterInterpretation.getAdjustedSqlString();
parameterMetadata = parameterInterpretation.toParameterMetadata( session );
parameterOccurrences = parameterInterpretation.getOrderedParameterOccurrences();
parameterBindings = parameterMetadata.createBindings( session.getFactory() );
resultSetMapping = resolveResultSetMapping( sql, true, session.getFactory() );
resultMappingSuppliedToCtor = false;
resultType = resultClass;
handleImplicitResultSetMapping( session );
}
private void handleImplicitResultSetMapping(SharedSessionContractImplementor session) {
if ( resultType != null && !session.getFactory().getMappingMetamodel().isEntityClass( resultType ) ) {
setTupleTransformerForResultType( resultType );
}
}
private void handleExplicitResultSetMapping() {
if ( resultType != null ) {
if ( isResultTypeAlwaysAllowed( resultType ) ) {
setTupleTransformerForResultType( resultType );
}
else {
checkResultType( resultType, resultSetMapping );
}
}
}
private void checkResultType(Class<R> resultType, ResultSetMapping resultSetMapping) {
// resultType can be null if any of the deprecated methods were used to create the query
if ( resultType != null && !isResultTypeAlwaysAllowed( resultType )) {
switch ( resultSetMapping.getNumberOfResultBuilders() ) {
case 0:
if ( !resultSetMapping.isDynamic() ) {
throw new IllegalArgumentException( "Named query exists, but did not specify a resultClass" );
}
break;
case 1:
final var actualResultJavaType = resultSetMapping.getResultBuilders().get( 0 ).getJavaType();
if ( actualResultJavaType != null && !resultType.isAssignableFrom( actualResultJavaType ) ) {
throw buildIncompatibleException( resultType, actualResultJavaType );
}
break;
default:
// The return type has to be a | NativeQueryImpl |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/protocol/ClientProtocolProvider.java | {
"start": 1067,
"end": 1378
} | class ____ {
public abstract ClientProtocol create(Configuration conf) throws IOException;
public abstract ClientProtocol create(InetSocketAddress addr,
Configuration conf) throws IOException;
public abstract void close(ClientProtocol clientProtocol) throws IOException;
}
| ClientProtocolProvider |
java | elastic__elasticsearch | x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java | {
"start": 3014,
"end": 18311
} | class ____ implements BiConsumer<Set<String>, ActionListener<RoleRetrievalResult>> {
private static final Pattern IN_SEGMENT_LINE = Pattern.compile("^\\s+.+");
private static final Pattern SKIP_LINE = Pattern.compile("(^#.*|^\\s*)");
private static final Logger logger = LogManager.getLogger(FileRolesStore.class);
private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder()
.allow2xFormat(true)
.allowDescription(true)
.build();
private final Settings settings;
private final Path file;
private final FileRoleValidator roleValidator;
private final XPackLicenseState licenseState;
private final NamedXContentRegistry xContentRegistry;
private final List<Consumer<Set<String>>> listeners = new ArrayList<>();
private volatile Map<String, RoleDescriptor> permissions;
public FileRolesStore(
Settings settings,
Environment env,
ResourceWatcherService watcherService,
XPackLicenseState licenseState,
NamedXContentRegistry xContentRegistry,
FileRoleValidator roleValidator
) throws IOException {
this(settings, env, watcherService, null, roleValidator, licenseState, xContentRegistry);
}
FileRolesStore(
Settings settings,
Environment env,
ResourceWatcherService watcherService,
Consumer<Set<String>> listener,
FileRoleValidator roleValidator,
XPackLicenseState licenseState,
NamedXContentRegistry xContentRegistry
) throws IOException {
this.settings = settings;
this.file = resolveFile(env);
this.roleValidator = roleValidator;
if (listener != null) {
listeners.add(listener);
}
this.licenseState = licenseState;
this.xContentRegistry = xContentRegistry;
FileWatcher watcher = new PrivilegedFileWatcher(file.getParent());
watcher.addListener(new FileListener());
watcherService.add(watcher, ResourceWatcherService.Frequency.HIGH);
permissions = parseFile(file, logger, settings, licenseState, xContentRegistry, roleValidator);
}
@Override
public void accept(Set<String> names, ActionListener<RoleRetrievalResult> listener) {
listener.onResponse(RoleRetrievalResult.success(roleDescriptors(names)));
}
Set<RoleDescriptor> roleDescriptors(Set<String> roleNames) {
final Map<String, RoleDescriptor> localPermissions = permissions;
Set<RoleDescriptor> descriptors = new HashSet<>();
roleNames.forEach((name) -> {
RoleDescriptor descriptor = localPermissions.get(name);
if (descriptor != null) {
descriptors.add(descriptor);
}
});
return descriptors;
}
public boolean exists(String name) {
final Map<String, RoleDescriptor> localPermissions = permissions;
return localPermissions.containsKey(name);
}
public Map<String, Object> usageStats() {
final Map<String, RoleDescriptor> localPermissions = permissions;
Map<String, Object> usageStats = Maps.newMapWithExpectedSize(3);
usageStats.put("size", localPermissions.size());
boolean dls = false;
boolean fls = false;
for (RoleDescriptor descriptor : localPermissions.values()) {
for (IndicesPrivileges indicesPrivileges : descriptor.getIndicesPrivileges()) {
fls = fls || indicesPrivileges.getGrantedFields() != null || indicesPrivileges.getDeniedFields() != null;
dls = dls || indicesPrivileges.getQuery() != null;
}
if (fls && dls) {
break;
}
}
usageStats.put("fls", fls);
usageStats.put("dls", dls);
usageStats.put("remote_indices", localPermissions.values().stream().filter(RoleDescriptor::hasRemoteIndicesPrivileges).count());
usageStats.put("remote_cluster", localPermissions.values().stream().filter(RoleDescriptor::hasRemoteClusterPermissions).count());
return usageStats;
}
public void addListener(Consumer<Set<String>> consumer) {
Objects.requireNonNull(consumer);
synchronized (this) {
listeners.add(consumer);
}
}
public Path getFile() {
return file;
}
/**
* @return a map of all file role definitions. The returned map is unmodifiable.
*/
public Map<String, RoleDescriptor> getAllRoleDescriptors() {
final Map<String, RoleDescriptor> localPermissions = permissions;
return Collections.unmodifiableMap(localPermissions);
}
// package private for testing
Set<String> getAllRoleNames() {
return permissions.keySet();
}
@Override
public String toString() {
return "file roles store (" + file + ")";
}
public static Path resolveFile(Environment env) {
return XPackPlugin.resolveConfigFile(env, "roles.yml");
}
public static Set<String> parseFileForRoleNames(Path path, Logger logger) {
if (logger == null) {
logger = NoOpLogger.INSTANCE;
}
Map<String, RoleDescriptor> roles = new HashMap<>();
logger.trace("attempting to read roles file located at [{}]", path.toAbsolutePath());
if (Files.exists(path)) {
try {
List<String> roleSegments = roleSegments(path);
for (String segment : roleSegments) {
RoleDescriptor rd = parseRoleDescriptor(
segment,
path,
logger,
false,
Settings.EMPTY,
NamedXContentRegistry.EMPTY,
new FileRoleValidator.Default()
);
if (rd != null) {
roles.put(rd.getName(), rd);
}
}
} catch (IOException ioe) {
logger.error(() -> format("failed to read roles file [%s]. skipping all roles...", path.toAbsolutePath()), ioe);
return emptySet();
}
}
return unmodifiableSet(roles.keySet());
}
public static Map<String, RoleDescriptor> parseFile(
Path path,
Logger logger,
Settings settings,
XPackLicenseState licenseState,
NamedXContentRegistry xContentRegistry,
FileRoleValidator roleValidator
) {
if (logger == null) {
logger = NoOpLogger.INSTANCE;
}
Map<String, RoleDescriptor> roles = new HashMap<>();
logger.debug("attempting to read roles file located at [{}]", path.toAbsolutePath());
if (Files.exists(path)) {
try {
List<String> roleSegments = roleSegments(path);
final boolean isDlsLicensed = DOCUMENT_LEVEL_SECURITY_FEATURE.checkWithoutTracking(licenseState);
for (String segment : roleSegments) {
RoleDescriptor descriptor = parseRoleDescriptor(segment, path, logger, true, settings, xContentRegistry, roleValidator);
if (descriptor != null) {
if (ReservedRolesStore.isReserved(descriptor.getName())) {
logger.warn(
"role [{}] is reserved. the relevant role definition in the mapping file will be ignored",
descriptor.getName()
);
} else if (descriptor.isUsingDocumentOrFieldLevelSecurity() && isDlsLicensed == false) {
logger.warn(
"role [{}] uses document and/or field level security, which is not enabled by the current license"
+ ". this role will be ignored",
descriptor.getName()
);
// we still put the role in the map to avoid unnecessary negative lookups
roles.put(descriptor.getName(), descriptor);
} else {
roles.put(descriptor.getName(), descriptor);
}
}
}
} catch (IOException ioe) {
logger.error(() -> format("failed to read roles file [%s]. skipping all roles...", path.toAbsolutePath()), ioe);
return emptyMap();
}
} else {
logger.debug("roles file does not exist");
return emptyMap();
}
logger.info("parsed [{}] roles from file [{}]", roles.size(), path.toAbsolutePath());
return unmodifiableMap(roles);
}
@Nullable
static RoleDescriptor parseRoleDescriptor(
String segment,
Path path,
Logger logger,
boolean resolvePermissions,
Settings settings,
NamedXContentRegistry xContentRegistry,
FileRoleValidator roleValidator
) {
String roleName = null;
XContentParserConfiguration parserConfig = XContentParserConfiguration.EMPTY.withRegistry(xContentRegistry)
.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE);
try (XContentParser parser = YamlXContent.yamlXContent.createParser(parserConfig, segment)) {
XContentParser.Token token = parser.nextToken();
if (token == XContentParser.Token.START_OBJECT) {
token = parser.nextToken();
if (token == XContentParser.Token.FIELD_NAME) {
roleName = parser.currentName();
Validation.Error validationError = Validation.Roles.validateRoleName(roleName, false);
if (validationError != null) {
logger.error(
"invalid role definition [{}] in roles file [{}]. invalid role name - {}. skipping role...",
roleName,
path.toAbsolutePath(),
validationError
);
return null;
}
if (resolvePermissions == false) {
return new RoleDescriptor(roleName, null, null, null);
}
token = parser.nextToken();
if (token == XContentParser.Token.START_OBJECT) {
// we do not want to reject files if field permissions are given in 2.x syntax, hence why we allow2xFormat
RoleDescriptor descriptor = ROLE_DESCRIPTOR_PARSER.parse(roleName, parser);
return checkDescriptor(descriptor, path, logger, settings, xContentRegistry, roleValidator);
} else {
logger.error("invalid role definition [{}] in roles file [{}]. skipping role...", roleName, path.toAbsolutePath());
return null;
}
}
}
logger.error("invalid role definition [{}] in roles file [{}]. skipping role...", roleName, path.toAbsolutePath());
} catch (ElasticsearchParseException e) {
assert roleName != null;
if (logger.isDebugEnabled()) {
final String finalRoleName = roleName;
logger.debug((Supplier<?>) () -> "parsing exception for role [" + finalRoleName + "]", e);
} else {
logger.error(e.getMessage() + ". skipping role...");
}
} catch (IOException | XContentParseException e) {
if (roleName != null) {
final String finalRoleName = roleName;
logger.error(() -> format("invalid role definition [%s] in roles file [%s]. skipping role...", finalRoleName, path), e);
} else {
logger.error(() -> format("invalid role definition [%s] in roles file [%s]. skipping role...", segment, path), e);
}
}
return null;
}
@Nullable
private static RoleDescriptor checkDescriptor(
RoleDescriptor descriptor,
Path path,
Logger logger,
Settings settings,
NamedXContentRegistry xContentRegistry,
FileRoleValidator roleValidator
) {
String roleName = descriptor.getName();
// first check if FLS/DLS is enabled on the role...
if (descriptor.isUsingDocumentOrFieldLevelSecurity()) {
if (XPackSettings.DLS_FLS_ENABLED.get(settings) == false) {
logger.error(
"invalid role definition [{}] in roles file [{}]. document and field level security is not "
+ "enabled. set [{}] to [true] in the configuration file. skipping role...",
roleName,
path.toAbsolutePath(),
XPackSettings.DLS_FLS_ENABLED.getKey()
);
return null;
} else {
try {
DLSRoleQueryValidator.validateQueryField(descriptor.getIndicesPrivileges(), xContentRegistry);
} catch (ElasticsearchException | IllegalArgumentException e) {
logger.error(
() -> format(
"invalid role definition [%s] in roles file [%s]. failed to validate query field. skipping role...",
roleName,
path.toAbsolutePath()
),
e
);
return null;
}
}
}
ActionRequestValidationException ex = roleValidator.validatePredefinedRole(descriptor);
if (ex != null) {
throw ex;
}
Validation.Error validationError = Validation.Roles.validateRoleDescription(descriptor.getDescription());
if (validationError != null) {
logger.error(
"invalid role definition [{}] in roles file [{}]. invalid description - {}. skipping role...",
roleName,
path.toAbsolutePath(),
validationError
);
return null;
}
return descriptor;
}
private static List<String> roleSegments(Path path) throws IOException {
List<String> segments = new ArrayList<>();
StringBuilder builder = null;
for (String line : Files.readAllLines(path, StandardCharsets.UTF_8)) {
if (SKIP_LINE.matcher(line).matches() == false) {
if (IN_SEGMENT_LINE.matcher(line).matches()) {
if (builder != null) {
builder.append(line).append("\n");
}
} else {
if (builder != null) {
segments.add(builder.toString());
}
builder = new StringBuilder(line).append("\n");
}
}
}
if (builder != null) {
segments.add(builder.toString());
}
return segments;
}
private | FileRolesStore |
java | apache__rocketmq | tools/src/main/java/org/apache/rocketmq/tools/command/topic/AllocateMQSubCommand.java | {
"start": 1639,
"end": 3758
} | class ____ implements SubCommand {
@Override
public String commandName() {
return "allocateMQ";
}
@Override
public String commandDesc() {
return "Allocate MQ.";
}
@Override
public Options buildCommandlineOptions(Options options) {
Option opt = new Option("t", "topic", true, "topic name");
opt.setRequired(true);
options.addOption(opt);
opt = new Option("i", "ipList", true, "ipList");
opt.setRequired(true);
options.addOption(opt);
return options;
}
@Override
public void execute(CommandLine commandLine, Options options, RPCHook rpcHook) throws SubCommandException {
DefaultMQAdminExt adminExt = new DefaultMQAdminExt(rpcHook);
adminExt.setInstanceName(Long.toString(System.currentTimeMillis()));
try {
adminExt.start();
String topic = commandLine.getOptionValue('t').trim();
String ips = commandLine.getOptionValue('i').trim();
final String[] split = ips.split(",");
final List<String> ipList = new LinkedList<>();
for (String ip : split) {
ipList.add(ip);
}
final TopicRouteData topicRouteData = adminExt.examineTopicRouteInfo(topic);
final Set<MessageQueue> mqs = MQClientInstance.topicRouteData2TopicSubscribeInfo(topic, topicRouteData);
final AllocateMessageQueueAveragely averagely = new AllocateMessageQueueAveragely();
RebalanceResult rr = new RebalanceResult();
for (String i : ipList) {
final List<MessageQueue> mqResult = averagely.allocate("aa", i, new ArrayList<>(mqs), ipList);
rr.getResult().put(i, mqResult);
}
final String json = RemotingSerializable.toJson(rr, false);
System.out.printf("%s%n", json);
} catch (Exception e) {
throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e);
} finally {
adminExt.shutdown();
}
}
}
| AllocateMQSubCommand |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/search/arguments/AggregateArgs.java | {
"start": 22627,
"end": 25673
} | class ____<K, V> implements PipelineOperation<K, V> {
private final List<K> properties;
private final List<Reducer<K, V>> reducers;
public GroupBy(List<K> properties) {
this.properties = new ArrayList<>(properties);
this.reducers = new ArrayList<>();
}
public GroupBy<K, V> reduce(Reducer<K, V> reducer) {
this.reducers.add(reducer);
return this;
}
/**
* Static factory method to create a GroupBy instance.
*
* @param properties the properties to group by
* @param <K> Key type
* @param <V> Value type
* @return new GroupBy instance
*/
@SafeVarargs
public static <K, V> GroupBy<K, V> of(K... properties) {
return new GroupBy<>(Arrays.asList(properties));
}
@Override
public void build(CommandArgs<K, V> args) {
args.add(CommandKeyword.GROUPBY);
args.add(properties.size());
for (K property : properties) {
// Add @ prefix if not already present
String propertyStr = property.toString();
if (!propertyStr.startsWith("@")) {
args.add("@" + propertyStr);
} else {
args.add(propertyStr);
}
}
for (Reducer<K, V> reducer : reducers) {
reducer.build(args);
}
}
}
/**
* Represents a SORTBY clause in an aggregation pipeline.
*
* <p>
* Sorts the pipeline results up until the point of SORTBY, using a list of properties. By default, sorting is ascending,
* but ASC or DESC can be specified for each property.
* </p>
*
* <h3>Example Usage:</h3>
*
* <pre>
*
* {
* @code
* // Simple sort by single field
* SortBy<String> sortBy = SortBy.of("price", SortDirection.DESC);
*
* // Sort with MAX optimization for top-N queries
* SortBy<String> topN = SortBy.of("score", SortDirection.DESC).max(100) // Only sort top 100 results
* .withCount(); // Include accurate count
*
* // Multiple sort criteria
* SortBy<String> multiSort = SortBy.of(new SortProperty<>("category", SortDirection.ASC),
* new SortProperty<>("price", SortDirection.DESC));
* }
* </pre>
*
* <h3>Performance Optimizations:</h3>
* <ul>
* <li><strong>MAX</strong> - Optimizes sorting by only processing the top N results</li>
* <li><strong>WITHCOUNT</strong> - Returns accurate counts but processes all results</li>
* <li><strong>SORTABLE fields</strong> - Use SORTABLE attribute in index for best performance</li>
* </ul>
*
* <p>
* <strong>Performance Note:</strong> Use {@code max()} for efficient top-N queries instead of sorting all results and then
* using LIMIT.
* </p>
*/
public static | GroupBy |
java | greenrobot__greendao | tests/DaoTestBase/src/main/java/org/greenrobot/greendao/daotest/ToManyEntityDao.java | {
"start": 471,
"end": 749
} | class ____ extends AbstractDao<ToManyEntity, Long> {
public static final String TABLENAME = "TO_MANY_ENTITY";
/**
* Properties of entity ToManyEntity.<br/>
* Can be used for QueryBuilder and for referencing column names.
*/
public static | ToManyEntityDao |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/histogram/HistogramPercentile.java | {
"start": 1954,
"end": 5245
} | class ____ extends EsqlScalarFunction {
public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(
Expression.class,
"HistogramPercentile",
HistogramPercentile::new
);
private final Expression histogram;
private final Expression percentile;
@FunctionInfo(returnType = { "double" })
public HistogramPercentile(
Source source,
@Param(name = "histogram", type = { "exponential_histogram" }) Expression histogram,
@Param(name = "percentile", type = { "double", "integer", "long", "unsigned_long" }) Expression percentile
) {
super(source, List.of(histogram, percentile));
this.histogram = histogram;
this.percentile = percentile;
}
private HistogramPercentile(StreamInput in) throws IOException {
this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), in.readNamedWriteable(Expression.class));
}
Expression histogram() {
return histogram;
}
Expression percentile() {
return percentile;
}
@Override
protected TypeResolution resolveType() {
return isType(histogram, dt -> dt == DataType.EXPONENTIAL_HISTOGRAM, sourceText(), DEFAULT, "exponential_histogram").and(
isType(percentile, DataType::isNumeric, sourceText(), DEFAULT, "numeric types")
);
}
@Override
public DataType dataType() {
return DataType.DOUBLE;
}
@Override
public Expression replaceChildren(List<Expression> newChildren) {
return new HistogramPercentile(source(), newChildren.get(0), newChildren.get(1));
}
@Override
public boolean foldable() {
return histogram.foldable() && percentile.foldable();
}
@Override
protected NodeInfo<? extends Expression> info() {
return NodeInfo.create(this, HistogramPercentile::new, histogram, percentile);
}
@Override
public String getWriteableName() {
return ENTRY.name;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
source().writeTo(out);
out.writeNamedWriteable(histogram);
out.writeNamedWriteable(percentile);
}
@Evaluator(warnExceptions = ArithmeticException.class)
static void process(DoubleBlock.Builder resultBuilder, ExponentialHistogram value, double percentile) {
if (percentile < 0.0 || percentile > 100.0) {
throw new ArithmeticException("Percentile value must be in the range [0, 100], got: " + percentile);
}
double result = ExponentialHistogramQuantile.getQuantile(value, percentile / 100.0);
if (Double.isNaN(result)) { // can happen if the histogram is empty
resultBuilder.appendNull();
} else {
resultBuilder.appendDouble(result);
}
}
@Override
public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) {
var fieldEvaluator = toEvaluator.apply(histogram);
var percentileEvaluator = Cast.cast(source(), percentile.dataType(), DataType.DOUBLE, toEvaluator.apply(percentile));
return new HistogramPercentileEvaluator.Factory(source(), fieldEvaluator, percentileEvaluator);
}
}
| HistogramPercentile |
java | quarkusio__quarkus | extensions/resteasy-classic/resteasy-client-mutiny/deployment/src/test/java/io/quarkus/restclient/mutiny/deployment/MutinyRestClientTest.java | {
"start": 1304,
"end": 1413
} | class ____ {
@GET
public String get() {
return "OK";
}
}
}
| TestEndpoint |
java | apache__camel | components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/FtpComponent.java | {
"start": 1380,
"end": 4726
} | class ____ extends RemoteFileComponent<FTPFile> {
public FtpComponent() {
}
public FtpComponent(CamelContext context) {
super(context);
}
@Override
protected GenericFileEndpoint<FTPFile> buildFileEndpoint(String uri, String remaining, Map<String, Object> parameters)
throws Exception {
String baseUri = getBaseUri(uri);
// lets make sure we create a new configuration as each endpoint can
// customize its own version
// must pass on baseUri to the configuration (see above)
FtpConfiguration config = new FtpConfiguration(new URI(baseUri));
FtpUtils.ensureRelativeFtpDirectory(this, config);
FtpEndpoint<FTPFile> answer = new FtpEndpoint<>(uri, this, config);
extractAndSetFtpClientConfigParameters(parameters, answer);
extractAndSetFtpClientParameters(parameters, answer);
return answer;
}
/**
* Get the base uri part before the options as they can be non URI valid such as the expression using $ chars and
* the URI constructor will regard $ as an illegal character, and we don't want to enforce end users to escape the $
* for the expression (file language)
*/
protected String getBaseUri(String uri) {
return StringHelper.before(uri, "?", uri);
}
/**
* Extract additional ftp client configuration options from the parameters map (parameters starting with
* 'ftpClientConfig.'). To remember these parameters, we set them in the endpoint and we can use them when creating
* a client.
*/
protected void extractAndSetFtpClientConfigParameters(Map<String, Object> parameters, FtpEndpoint<FTPFile> answer) {
if (PropertiesHelper.hasProperties(parameters, "ftpClientConfig.")) {
Map<String, Object> param = PropertiesHelper.extractProperties(parameters, "ftpClientConfig.");
answer.setFtpClientConfigParameters(param);
}
}
/**
* Extract additional ftp client options from the parameters map (parameters starting with 'ftpClient.'). To
* remember these parameters, we set them in the endpoint and we can use them when creating a client.
*/
protected void extractAndSetFtpClientParameters(Map<String, Object> parameters, FtpEndpoint<FTPFile> answer) {
if (PropertiesHelper.hasProperties(parameters, "ftpClient.")) {
Map<String, Object> param = PropertiesHelper.extractProperties(parameters, "ftpClient.");
answer.setFtpClientParameters(param);
}
}
@Override
protected void setProperties(Endpoint endpoint, Map<String, Object> parameters) throws Exception {
Object siteCommand = parameters.remove("siteCommand");
if (siteCommand != null) {
String cmd = PropertyConfigurerSupport.property(getCamelContext(), String.class, siteCommand);
if (EndpointHelper.isReferenceParameter(cmd)) {
cmd = EndpointHelper.resolveReferenceParameter(getCamelContext(), cmd, String.class);
}
((FtpEndpoint) endpoint).getConfiguration().setSiteCommand(cmd);
}
super.setProperties(endpoint, parameters);
}
@Override
protected void afterPropertiesSet(GenericFileEndpoint<FTPFile> endpoint) throws Exception {
// noop
}
}
| FtpComponent |
java | micronaut-projects__micronaut-core | http-client/src/test/java/io/micronaut/http/client/ProxyBackpressureTest.java | {
"start": 5335,
"end": 5867
} | class ____ {
volatile long emitted = 0;
@Get("/large")
Publisher<byte[]> large() {
return Flux.range(0, TOTAL_CHUNKS)
.map(i -> {
var arr = new byte[CHUNK_SIZE];
ThreadLocalRandom.current().nextBytes(arr);
return arr;
})
.doOnNext(it -> emitted += it.length);
}
}
@ServerFilter("/proxy")
@Requires(property = "spec.name", value = "ProxyBackpressureTest")
static | Ctrl |
java | quarkusio__quarkus | integration-tests/gradle/src/main/resources/conditional-dependencies/ext-m/deployment/src/main/java/org/acme/quarkus/ext/m/deployment/AcmeQuarkusExtProcessor.java | {
"start": 155,
"end": 355
} | class ____ {
private static final String FEATURE = "acme-quarkus-ext-m";
@BuildStep
FeatureBuildItem feature() {
return new FeatureBuildItem(FEATURE);
}
} | AcmeQuarkusExtProcessor |
java | micronaut-projects__micronaut-core | core/src/main/java/io/micronaut/core/annotation/AnnotationSource.java | {
"start": 6420,
"end": 7192
} | interface ____ should be avoided in favour of
* direct use of the annotation metadata and only used for unique cases that require integrating third party libraries.
*
* @param annotationClass The annotation type
* @param <T> The annotation generic type
* @return All annotations by the given type
*/
@SuppressWarnings("unchecked")
default @NonNull <T extends Annotation> T[] synthesizeAnnotationsByType(@NonNull Class<T> annotationClass) {
ArgumentUtils.requireNonNull("annotationClass", annotationClass);
return (T[]) Array.newInstance(annotationClass, 0);
}
/**
* Synthesizes a new annotations from the metadata for the given type. This method works
* by creating a runtime proxy of the annotation | and |
java | eclipse-vertx__vert.x | vertx-core/src/test/java/io/vertx/test/fakemetrics/FakeVertxMetrics.java | {
"start": 959,
"end": 2662
} | class ____ extends FakeMetricsBase implements VertxMetrics {
private final MetricsOptions options;
private volatile Vertx vertx;
public FakeVertxMetrics(MetricsOptions options) {
this.options = options;
}
public FakeVertxMetrics() {
this.options = new MetricsOptions();
}
public MetricsOptions options() {
return options;
}
public Vertx vertx() {
return vertx;
}
@Override
public boolean isMetricsEnabled() {
return true;
}
public EventBusMetrics createEventBusMetrics() {
return new FakeEventBusMetrics();
}
public HttpServerMetrics<?, ?, ?> createHttpServerMetrics(HttpServerOptions options, SocketAddress localAddress) {
return new FakeHttpServerMetrics();
}
public HttpClientMetrics<?, ?, ?> createHttpClientMetrics(HttpClientOptions options) {
return new FakeHttpClientMetrics(options.getMetricsName());
}
public TCPMetrics<?> createNetServerMetrics(NetServerOptions options, SocketAddress localAddress) {
return new FakeTCPMetrics();
}
public TCPMetrics<?> createNetClientMetrics(NetClientOptions options) {
return new FakeTCPMetrics();
}
@Override
public TransportMetrics<?> createQuicEndpointMetrics(QuicEndpointOptions options, SocketAddress localAddress) {
return new FakeQuicEndpointMetrics();
}
public DatagramSocketMetrics createDatagramSocketMetrics(DatagramSocketOptions options) {
return new FakeDatagramSocketMetrics();
}
@Override
public PoolMetrics<?, ?> createPoolMetrics(String type, String name, int maxSize) {
return new FakePoolMetrics(name, maxSize);
}
@Override
public void vertxCreated(Vertx vertx) {
this.vertx = vertx;
}
}
| FakeVertxMetrics |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/enum_interface_type_handler/XmlMapper.java | {
"start": 723,
"end": 776
} | interface ____ {
int insertUser(User user);
}
| XmlMapper |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/subselect/Book.java | {
"start": 178,
"end": 556
} | class ____ {
int id;
String title;
int authorId;
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public int getAuthorId() {
return authorId;
}
public void setAuthorId(int authorId) {
this.authorId = authorId;
}
}
| Book |
java | spring-projects__spring-security | web/src/main/java/org/springframework/security/web/authentication/RememberMeServices.java | {
"start": 1827,
"end": 1918
} | interface ____ been designed to accommodate any of these remember-me models.
* <p>
* This | has |
java | quarkusio__quarkus | integration-tests/keycloak-authorization/src/main/java/io/quarkus/it/keycloak/UsersResource.java | {
"start": 867,
"end": 1153
} | class ____ {
private final String userName;
User(SecurityIdentity securityContext) {
this.userName = securityContext.getPrincipal().getName();
}
public String getUserName() {
return userName;
}
}
public static | User |
java | micronaut-projects__micronaut-core | http-client/src/main/java/io/micronaut/http/client/netty/ConnectionManager.java | {
"start": 42831,
"end": 45495
} | class ____ extends CustomizerAwareInitializer {
private final PoolHolder pool;
Http2UpgradeInitializer(PoolHolder pool) {
this.pool = pool;
}
@Override
protected void initChannel(@NonNull Channel ch) throws Exception {
NettyClientCustomizer connectionCustomizer = bootstrappedCustomizer.specializeForChannel(ch, NettyClientCustomizer.ChannelRole.CONNECTION);
insertPcapLoggingHandlerLazy(ch, "outer");
Http2FrameCodec frameCodec = makeFrameCodec();
HttpClientCodec sourceCodec = new HttpClientCodec(
HttpClientConfiguration.DEFAULT_MAX_INITIAL_LINE_LENGTH,
configuration.getMaxHeaderSize(),
HttpClientConfiguration.DEFAULT_MAX_CHUNK_SIZE);
Http2ClientUpgradeCodec upgradeCodec = new Http2ClientUpgradeCodec(frameCodec,
new ChannelInitializer<Channel>() {
@Override
protected void initChannel(@NonNull Channel ch) throws Exception {
ch.pipeline().addLast(ChannelPipelineCustomizer.HANDLER_HTTP2_CONNECTION, frameCodec);
initHttp2(pool, ch, connectionCustomizer);
}
});
HttpClientUpgradeHandler upgradeHandler = new HttpClientUpgradeHandler(sourceCodec, upgradeCodec, 65536);
ch.pipeline().addLast(ChannelPipelineCustomizer.HANDLER_HTTP_CLIENT_CODEC, sourceCodec);
ch.pipeline().addLast(upgradeHandler);
ch.pipeline().addLast(ChannelPipelineCustomizer.HANDLER_HTTP2_UPGRADE_REQUEST, new ActivityHandler() {
@Override
public void channelActive0(@NonNull ChannelHandlerContext ctx) throws Exception {
DefaultFullHttpRequest upgradeRequest =
new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/", Unpooled.EMPTY_BUFFER);
// Set HOST header as the remote peer may require it.
upgradeRequest.headers().set(HttpHeaderNames.HOST, pool.requestKey.getHost() + ':' + pool.requestKey.getPort());
ctx.writeAndFlush(upgradeRequest);
ctx.pipeline().remove(ChannelPipelineCustomizer.HANDLER_HTTP2_UPGRADE_REQUEST);
// read the upgrade response
ctx.read();
}
});
ch.pipeline().addLast(ChannelPipelineCustomizer.HANDLER_INITIAL_ERROR, pool.initialErrorHandler);
connectionCustomizer.onInitialPipelineBuilt();
}
}
private final | Http2UpgradeInitializer |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/buildextension/beans/SyntheticBeanWithWildcardParameterizedTypeArrayTest.java | {
"start": 716,
"end": 1253
} | class ____ {
@RegisterExtension
public ArcTestContainer container = ArcTestContainer.builder()
.beanRegistrars(new MyBeanRegistrar())
.shouldFail()
.build();
@Test
public void trigger() {
Throwable error = container.getFailure();
assertNotNull(error);
assertInstanceOf(DefinitionException.class, error);
assertTrue(error.getMessage().contains("Wildcard type is not a legal bean type"));
}
static | SyntheticBeanWithWildcardParameterizedTypeArrayTest |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/validators/BaseTypeParser.java | {
"start": 1716,
"end": 5296
} | class ____: " + baseClassName);
}
for (;;) {
RxMethod m = new RxMethod();
int javadocStart = b.indexOf("/**", baseIndex);
if (javadocStart < 0) {
break;
}
int javadocEnd = b.indexOf("*/", javadocStart + 2);
m.javadoc = b.substring(javadocStart, javadocEnd + 2);
m.javadocLine = JavadocForAnnotations.lineNumber(b, javadocStart);
int backpressureDoc = b.indexOf("<dt><b>Backpressure:</b></dt>", javadocStart);
if (backpressureDoc > 0 && backpressureDoc < javadocEnd) {
m.backpressureDocLine = JavadocForAnnotations.lineNumber(b, backpressureDoc);
int nextDD = b.indexOf("</dd>", backpressureDoc);
if (nextDD > 0 && nextDD < javadocEnd) {
m.backpressureDocumentation = b.substring(backpressureDoc, nextDD + 5);
}
}
int schedulerDoc = b.indexOf("<dt><b>Scheduler:</b></dt>", javadocStart);
if (schedulerDoc > 0 && schedulerDoc < javadocEnd) {
m.schedulerDocLine = JavadocForAnnotations.lineNumber(b, schedulerDoc);
int nextDD = b.indexOf("</dd>", schedulerDoc);
if (nextDD > 0 && nextDD < javadocEnd) {
m.schedulerDocumentation = b.substring(schedulerDoc, nextDD + 5);
}
}
int staticMethodDef = b.indexOf("public static ", javadocEnd + 2);
if (staticMethodDef < 0) {
staticMethodDef = Integer.MAX_VALUE;
}
int instanceMethodDef = b.indexOf("public final ", javadocEnd + 2);
if (instanceMethodDef < 0) {
instanceMethodDef = Integer.MAX_VALUE;
}
int javadocStartNext = b.indexOf("/**", javadocEnd + 2);
if (javadocStartNext < 0) {
javadocStartNext = Integer.MAX_VALUE;
}
int definitionStart = -1;
if (staticMethodDef > 0 && staticMethodDef < javadocStartNext && staticMethodDef < instanceMethodDef) {
definitionStart = staticMethodDef;
}
if (instanceMethodDef > 0 && instanceMethodDef < javadocStartNext && instanceMethodDef < staticMethodDef) {
definitionStart = instanceMethodDef;
}
if (definitionStart > 0) {
int methodDefEnd = b.indexOf("{", definitionStart);
m.signature = b.substring(definitionStart, methodDefEnd + 1);
m.methodLine = JavadocForAnnotations.lineNumber(b, definitionStart);
int backpressureSpec = b.indexOf("@BackpressureSupport(", javadocEnd);
if (backpressureSpec > 0 && backpressureSpec < definitionStart) {
int backpressureSpecEnd = b.indexOf(")", backpressureSpec + 21);
m.backpressureKind = b.substring(backpressureSpec + 21, backpressureSpecEnd);
}
int schhedulerSpec = b.indexOf("@SchedulerSupport(", javadocEnd);
if (schhedulerSpec > 0 && schhedulerSpec < definitionStart) {
int schedulerSpecEnd = b.indexOf(")", schhedulerSpec + 18);
m.schedulerKind = b.substring(schhedulerSpec + 18, schedulerSpecEnd);
}
list.add(m);
baseIndex = methodDefEnd;
} else {
baseIndex = javadocEnd + 2;
}
}
return list;
}
}
| file |
java | apache__camel | components/camel-irc/src/test/java/org/apache/camel/component/irc/it/IrcsWithSslContextParamsRouteIT.java | {
"start": 1122,
"end": 2864
} | class ____ extends IrcRouteIT {
// TODO This test is disabled until we can find a public SSL enabled IRC
// server to test against. To use this test, follow the following procedures:
// 1) Download and install UnrealIRCd 3.2.9 from http://www.unrealircd.com/
// 2) Copy the contents of the src/test/unrealircd folder into the installation
// folder of UnrealIRCd.
// 3) Start UnrealIRCd and execute this test. Often the test executes quicker than
// the IRC server responds and the assertion will fail. In order to get the test to
// pass reliably, you may need to set a break point in IrcEndpoint#joinChanel in order
// to slow the route creation down enough for the event listener to be in place
// when camel-con joins the room.
@BindToRegistry("sslContextParameters")
protected SSLContextParameters loadSslContextParams() {
KeyStoreParameters ksp = new KeyStoreParameters();
ksp.setResource("localhost.p12");
ksp.setPassword("changeit");
TrustManagersParameters tmp = new TrustManagersParameters();
tmp.setKeyStore(ksp);
SSLContextParameters sslContextParameters = new SSLContextParameters();
sslContextParameters.setTrustManagers(tmp);
return sslContextParameters;
}
@Override
protected String sendUri() {
return "ircs://camel-prd-user@localhost:6669/#camel-test?nickname=camel-prd&password=password&sslContextParameters=#sslContextParameters";
}
@Override
protected String fromUri() {
return "ircs://camel-con-user@localhost:6669/#camel-test?nickname=camel-con&password=password&sslContextParameters=#sslContextParameters";
}
}
| IrcsWithSslContextParamsRouteIT |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/cid/NestedCompositeIdWithOrderedUpdatesTest.java | {
"start": 2172,
"end": 2495
} | class ____
{
@EmbeddedId
private AId id;
private String avalue;
public AId getId()
{
return id;
}
public void setId(AId id)
{
this.id=id;
}
public String getAvalue()
{
return avalue;
}
public void setAvalue(String avalue)
{
this.avalue=avalue;
}
}
@Embeddable
public static | A |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.