language
stringclasses
1 value
repo
stringclasses
60 values
path
stringlengths
22
294
class_span
dict
source
stringlengths
13
1.16M
target
stringlengths
1
113
java
spring-projects__spring-framework
spring-context/src/main/java/org/springframework/format/datetime/standard/DateTimeConverters.java
{ "start": 4331, "end": 4556 }
class ____ implements Converter<ZonedDateTime, LocalDateTime> { @Override public LocalDateTime convert(ZonedDateTime source) { return source.toLocalDateTime(); } } private static
ZonedDateTimeToLocalDateTimeConverter
java
grpc__grpc-java
opentelemetry/src/main/java/io/grpc/opentelemetry/OpenTelemetryMetricsModule.java
{ "start": 25517, "end": 27967 }
class ____ implements ClientInterceptor { private final String target; private final ImmutableList<OpenTelemetryPlugin> plugins; MetricsClientInterceptor(String target, ImmutableList<OpenTelemetryPlugin> plugins) { this.target = checkNotNull(target, "target"); this.plugins = checkNotNull(plugins, "plugins"); } @Override public <ReqT, RespT> ClientCall<ReqT, RespT> interceptCall( MethodDescriptor<ReqT, RespT> method, CallOptions callOptions, Channel next) { final List<OpenTelemetryPlugin.ClientCallPlugin> callPlugins; if (plugins.isEmpty()) { callPlugins = Collections.emptyList(); } else { List<OpenTelemetryPlugin.ClientCallPlugin> callPluginsMutable = new ArrayList<>(plugins.size()); for (OpenTelemetryPlugin plugin : plugins) { callPluginsMutable.add(plugin.newClientCallPlugin()); } callPlugins = Collections.unmodifiableList(callPluginsMutable); for (OpenTelemetryPlugin.ClientCallPlugin plugin : callPlugins) { callOptions = plugin.filterCallOptions(callOptions); } } // Only record method name as an attribute if isSampledToLocalTracing is set to true, // which is true for all generated methods. Otherwise, programatically // created methods result in high cardinality metrics. final CallAttemptsTracerFactory tracerFactory = new CallAttemptsTracerFactory( OpenTelemetryMetricsModule.this, target, recordMethodName(method.getFullMethodName(), method.isSampledToLocalTracing()), callPlugins); ClientCall<ReqT, RespT> call = next.newCall(method, callOptions.withStreamTracerFactory(tracerFactory)); return new SimpleForwardingClientCall<ReqT, RespT>(call) { @Override public void start(Listener<RespT> responseListener, Metadata headers) { for (OpenTelemetryPlugin.ClientCallPlugin plugin : callPlugins) { plugin.addMetadata(headers); } delegate().start( new SimpleForwardingClientCallListener<RespT>(responseListener) { @Override public void onClose(Status status, Metadata trailers) { tracerFactory.callEnded(status); super.onClose(status, trailers); } }, headers); } }; } } }
MetricsClientInterceptor
java
google__auto
value/src/test/java/com/google/auto/value/processor/AutoValueCompilationTest.java
{ "start": 138825, "end": 139291 }
class ____ {", " public abstract String foo();", "}"); JavaFileObject annotation1FileObject = JavaFileObjects.forSourceLines( "com.package1.Annotation1", "package com.package1;", "", "import java.lang.annotation.ElementType;", "import java.lang.annotation.Target;", "", "@Target({ElementType.FIELD, ElementType.METHOD})", "public @
Parent
java
apache__kafka
clients/src/main/java/org/apache/kafka/common/requests/StreamsGroupDescribeRequest.java
{ "start": 1281, "end": 3447 }
class ____ extends AbstractRequest.Builder<StreamsGroupDescribeRequest> { private final StreamsGroupDescribeRequestData data; public Builder(StreamsGroupDescribeRequestData data) { super(ApiKeys.STREAMS_GROUP_DESCRIBE); this.data = data; } @Override public StreamsGroupDescribeRequest build(short version) { return new StreamsGroupDescribeRequest(data, version); } @Override public String toString() { return data.toString(); } } private final StreamsGroupDescribeRequestData data; public StreamsGroupDescribeRequest(StreamsGroupDescribeRequestData data, short version) { super(ApiKeys.STREAMS_GROUP_DESCRIBE, version); this.data = data; } @Override public StreamsGroupDescribeResponse getErrorResponse(int throttleTimeMs, Throwable e) { StreamsGroupDescribeResponseData data = new StreamsGroupDescribeResponseData() .setThrottleTimeMs(throttleTimeMs); // Set error for each group this.data.groupIds().forEach( groupId -> data.groups().add( new StreamsGroupDescribeResponseData.DescribedGroup() .setGroupId(groupId) .setErrorCode(Errors.forException(e).code()) ) ); return new StreamsGroupDescribeResponse(data); } @Override public StreamsGroupDescribeRequestData data() { return data; } public static StreamsGroupDescribeRequest parse(Readable readable, short version) { return new StreamsGroupDescribeRequest( new StreamsGroupDescribeRequestData(readable, version), version ); } public static List<StreamsGroupDescribeResponseData.DescribedGroup> getErrorDescribedGroupList( List<String> groupIds, Errors error ) { return groupIds.stream() .map(groupId -> new StreamsGroupDescribeResponseData.DescribedGroup() .setGroupId(groupId) .setErrorCode(error.code()) ).collect(Collectors.toList()); } }
Builder
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/schemaupdate/idbag/IdBagSequenceTest.java
{ "start": 1282, "end": 2025 }
class ____ { @Test public void testIdBagSequenceGeneratorIsCreated( DomainModelScope modelScope, @TempDir File tmpDir) throws Exception { final var scriptFile = new File( tmpDir, "update_script.sql" ); final var metadata = modelScope.getDomainModel(); metadata.orderColumns( false ); metadata.validate(); new SchemaUpdate() .setHaltOnError( true ) .setOutputFile( scriptFile.getAbsolutePath() ) .setDelimiter( ";" ) .setFormat( true ) .execute( EnumSet.of( TargetType.SCRIPT ), metadata ); String fileContent = new String( Files.readAllBytes( scriptFile.toPath() ) ); MatcherAssert.assertThat( fileContent.toLowerCase().contains( "create sequence seq_child_id" ), is( true ) ); } }
IdBagSequenceTest
java
apache__flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/internal/TableImpl.java
{ "start": 24554, "end": 26693 }
class ____ implements WindowGroupedTable { private final TableImpl table; private final List<Expression> groupKeys; private final GroupWindow window; private WindowGroupedTableImpl( TableImpl table, List<Expression> groupKeys, GroupWindow window) { this.table = table; this.groupKeys = groupKeys; this.window = window; } @Override public Table select(Expression... fields) { List<Expression> expressionsWithResolvedCalls = table.preprocessExpressions(fields); CategorizedExpressions extracted = OperationExpressionsUtils.extractAggregationsAndProperties( expressionsWithResolvedCalls); return table.createTable( table.operationTreeBuilder.project( extracted.getProjections(), table.operationTreeBuilder.windowAggregate( groupKeys, window, extracted.getWindowProperties(), extracted.getAggregations(), table.operationTree), // required for proper resolution of the time attribute in multi-windows true)); } @Override public AggregatedTable aggregate(Expression aggregateFunction) { return new WindowAggregatedTableImpl(table, groupKeys, aggregateFunction, window); } @Override public FlatAggregateTable flatAggregate(Expression tableAggregateFunction) { return new WindowFlatAggregateTableImpl( table, groupKeys, tableAggregateFunction, window); } } // -------------------------------------------------------------------------------------------- // Window Aggregated Table // -------------------------------------------------------------------------------------------- private static final
WindowGroupedTableImpl
java
spring-projects__spring-framework
spring-test/src/test/java/org/springframework/test/context/jdbc/PopulatedSchemaDatabaseConfig.java
{ "start": 1263, "end": 1724 }
class ____ { @Bean PlatformTransactionManager transactionManager() { return new DataSourceTransactionManager(dataSource()); } @Bean DataSource dataSource() { return new EmbeddedDatabaseBuilder() .generateUniqueName(true) .addScript("classpath:/org/springframework/test/context/jdbc/schema.sql") .build(); } @Bean JdbcTemplate jdbcTemplate(DataSource dataSource) { return new JdbcTemplate(dataSource); } }
PopulatedSchemaDatabaseConfig
java
apache__camel
components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/api/MultiSelectPicklistDeserializer.java
{ "start": 1525, "end": 2406 }
class ____ extends StdDeserializer<Object> implements ContextualDeserializer { private static final long serialVersionUID = -4568286926393043366L; private static final String FACTORY_METHOD = "fromValue"; private final Class<? extends Enum<?>> enumClass; private final Method factoryMethod; public MultiSelectPicklistDeserializer() { super(Object.class); this.factoryMethod = null; this.enumClass = null; } public MultiSelectPicklistDeserializer(JsonParser jp, Class<? extends Enum<?>> enumClass) throws JsonMappingException { super(enumClass); this.enumClass = enumClass; try { this.factoryMethod = enumClass.getMethod(FACTORY_METHOD, String.class); } catch (NoSuchMethodException e) { throw new JsonMappingException(jp, "Invalid pick-list
MultiSelectPicklistDeserializer
java
netty__netty
codec-stomp/src/main/java/io/netty/handler/codec/stomp/StompContentSubframe.java
{ "start": 1256, "end": 1777 }
interface ____ extends ByteBufHolder, StompSubframe { @Override StompContentSubframe copy(); @Override StompContentSubframe duplicate(); @Override StompContentSubframe retainedDuplicate(); @Override StompContentSubframe replace(ByteBuf content); @Override StompContentSubframe retain(); @Override StompContentSubframe retain(int increment); @Override StompContentSubframe touch(); @Override StompContentSubframe touch(Object hint); }
StompContentSubframe
java
quarkusio__quarkus
extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/serialization/BufferSerializer.java
{ "start": 204, "end": 423 }
class ____ implements Serializer<Buffer> { @Override public byte[] serialize(String topic, Buffer data) { if (data == null) return null; return data.getBytes(); } }
BufferSerializer
java
apache__camel
dsl/camel-jbang/camel-jbang-core/src/main/java/org/apache/camel/dsl/jbang/core/commands/ExportHelper.java
{ "start": 1062, "end": 1201 }
class ____ export utilities for instance when copying files from temporary work folder to export directory * target. */ public final
provides
java
quarkusio__quarkus
extensions/smallrye-health/deployment/src/main/java/io/quarkus/smallrye/health/deployment/SmallRyeHealthProcessor.java
{ "start": 4184, "end": 6553 }
class ____ { private static final Logger LOG = Logger.getLogger(SmallRyeHealthProcessor.class); private static final String CONFIG_KEY_HEALTH_MANAGEMENT_ENABLED = "quarkus.smallrye-health.management.enabled"; private static final DotName LIVENESS = DotName.createSimple(Liveness.class.getName()); private static final DotName READINESS = DotName.createSimple(Readiness.class.getName()); private static final DotName STARTUP = DotName.createSimple(Startup.class.getName()); private static final DotName HEALTH_GROUP = DotName.createSimple(HealthGroup.class.getName()); private static final DotName HEALTH_GROUPS = DotName.createSimple(HealthGroups.class.getName()); private static final DotName WELLNESS = DotName.createSimple(Wellness.class.getName()); private static final DotName JAX_RS_PATH = DotName.createSimple("jakarta.ws.rs.Path"); // For the UI private static final GACT HEALTH_UI_WEBJAR_ARTIFACT_KEY = new GACT("io.smallrye", "smallrye-health-ui", null, "jar"); private static final String HEALTH_UI_WEBJAR_STATIC_RESOURCES_PATH = "META-INF/resources/health-ui/"; private static final String JS_FILE_TO_UPDATE = "healthui.js"; private static final String INDEX_FILE_TO_UPDATE = "index.html"; // Branding files to monitor for changes private static final String BRANDING_DIR = "META-INF/branding/"; private static final String BRANDING_LOGO_GENERAL = BRANDING_DIR + "logo.png"; private static final String BRANDING_LOGO_MODULE = BRANDING_DIR + "smallrye-health-ui.png"; private static final String BRANDING_STYLE_GENERAL = BRANDING_DIR + "style.css"; private static final String BRANDING_STYLE_MODULE = BRANDING_DIR + "smallrye-health-ui.css"; private static final String BRANDING_FAVICON_GENERAL = BRANDING_DIR + "favicon.ico"; private static final String BRANDING_FAVICON_MODULE = BRANDING_DIR + "smallrye-health-ui.ico"; // For Kubernetes exposing private static final String SCHEME_HTTP = "HTTP"; private static final String SCHEME_HTTPS = "HTTPS"; // For Management ports private 
static final String MANAGEMENT_SSL_PREFIX = "quarkus.management.ssl.certificate."; private static final List<String> MANAGEMENT_SSL_PROPERTIES = List.of("key-store-file", "trust-store-file", "files", "key-files"); static
SmallRyeHealthProcessor
java
apache__flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/Sum0AggFunction.java
{ "start": 4918, "end": 5340 }
class ____ extends Sum0AggFunction { @Override public DataType getResultType() { return DataTypes.SMALLINT(); } @Override public Expression[] initialValuesExpressions() { return new Expression[] {/* sum0= */ literal((short) 0, getResultType().notNull())}; } } /** Built-in Long Sum0 aggregate function. */ public static
ShortSum0AggFunction
java
elastic__elasticsearch
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/EsqlInfoTransportAction.java
{ "start": 648, "end": 1155 }
class ____ extends XPackInfoFeatureTransportAction { @Inject public EsqlInfoTransportAction(TransportService transportService, ActionFilters actionFilters) { super(XPackInfoFeatureAction.ESQL.name(), transportService, actionFilters); } @Override public String name() { return XPackField.ESQL; } @Override public boolean available() { return true; } @Override public boolean enabled() { return true; } }
EsqlInfoTransportAction
java
apache__camel
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/SchematronComponentBuilderFactory.java
{ "start": 1876, "end": 3994 }
interface ____ extends ComponentBuilder<SchematronComponent> { /** * Whether the producer should be started lazy (on the first message). * By starting lazy you can use this to allow CamelContext and routes to * startup in situations where a producer may otherwise fail during * starting and cause the route to fail being started. By deferring this * startup to be lazy then the startup failure can be handled during * routing messages via Camel's routing error handlers. Beware that when * the first message is processed then creating and starting the * producer may take a little time and prolong the total processing time * of the processing. * * The option is a: &lt;code&gt;boolean&lt;/code&gt; type. * * Default: false * Group: producer * * @param lazyStartProducer the value to set * @return the dsl builder */ default SchematronComponentBuilder lazyStartProducer(boolean lazyStartProducer) { doSetProperty("lazyStartProducer", lazyStartProducer); return this; } /** * Whether autowiring is enabled. This is used for automatic autowiring * options (the option must be marked as autowired) by looking up in the * registry to find if there is a single instance of matching type, * which then gets configured on the component. This can be used for * automatic configuring JDBC data sources, JMS connection factories, * AWS Clients, etc. * * The option is a: &lt;code&gt;boolean&lt;/code&gt; type. * * Default: true * Group: advanced * * @param autowiredEnabled the value to set * @return the dsl builder */ default SchematronComponentBuilder autowiredEnabled(boolean autowiredEnabled) { doSetProperty("autowiredEnabled", autowiredEnabled); return this; } }
SchematronComponentBuilder
java
apache__kafka
clients/src/main/java/org/apache/kafka/clients/admin/DescribeUserScramCredentialsOptions.java
{ "start": 987, "end": 1094 }
class ____ extends AbstractOptions<DescribeUserScramCredentialsOptions> { }
DescribeUserScramCredentialsOptions
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/internal/util/collections/LockModeEnumMap.java
{ "start": 309, "end": 608 }
enum ____. * This implementation favours fast read operations * and low memory consumption over other metrics. * * Specifically designed with specific use cases in mind: * do not overly reuse without good reasons. * * @param <V> the value type to be associated with each key */ public final
keys
java
spring-projects__spring-boot
module/spring-boot-devtools/src/main/java/org/springframework/boot/devtools/remote/server/UrlHandlerMapper.java
{ "start": 970, "end": 1626 }
class ____ implements HandlerMapper { private final String requestUri; private final Handler handler; /** * Create a new {@link UrlHandlerMapper}. * @param url the URL to map * @param handler the handler to use */ public UrlHandlerMapper(String url, Handler handler) { Assert.hasLength(url, "'url' must not be empty"); Assert.isTrue(url.startsWith("/"), "'url' must start with '/'"); this.requestUri = url; this.handler = handler; } @Override public @Nullable Handler getHandler(ServerHttpRequest request) { if (this.requestUri.equals(request.getURI().getPath())) { return this.handler; } return null; } }
UrlHandlerMapper
java
micronaut-projects__micronaut-core
http-netty/src/main/java/io/micronaut/http/netty/body/JsonCounter.java
{ "start": 15546, "end": 17497 }
enum ____ { /** * Default state, anything that's not inside a string, not a top-level scalar (numbers, * booleans, null), and not a special state for {@link #unwrapTopLevelArray() unwrapping}. */ BASE, /** * State inside a string. Braces are ignored, and escape sequences get special handling. */ STRING, /** * State inside a "top-level scalar", i.e. a boolean, number or {@code null} that is not * part of an array or object. These are a bit special because unlike strings, which * terminate on {@code "}, and structures, which terminate on a bracket, these terminate on * whitespace. */ TOP_LEVEL_SCALAR, /** * State just after {@code \} inside a {@link #STRING}. The next byte is ignored, and then * we return to {@link #STRING} state. */ ESCAPE, /** * Special state for {@link #unwrapTopLevelArray() unwrapping}, before the top-level array. * At this point we don't know if there is a top-level array that we need to unwrap or not. */ BEFORE_UNWRAP_ARRAY, /** * Special state for {@link #unwrapTopLevelArray() unwrapping}, after the closing brace of * a top-level array. Any further tokens after this are an error. */ AFTER_UNWRAP_ARRAY, /** * Special state for {@link #noTokenization()}. The input is not visited at all, we just * assume everything is part of one root-level token and buffer it all. */ BUFFER_ALL, } /** * A region that contains a JSON node. Positions are relative to {@link #position()}. * * @param start First byte position of this node * @param end Position after the last byte of this node (i.e. it's exclusive) */ public record BufferRegion(long start, long end) { } }
State
java
apache__camel
components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/api/dto/bulkv2/LineEndingEnum.java
{ "start": 873, "end": 1345 }
enum ____ { LF("lf"), CRLF("crlf"); private final String value; LineEndingEnum(String value) { this.value = value; } public String value() { return value; } public static LineEndingEnum fromValue(String v) { for (LineEndingEnum c : LineEndingEnum.values()) { if (c.value.equals(v)) { return c; } } throw new IllegalArgumentException(v); } }
LineEndingEnum
java
netty__netty
codec-http/src/test/java/io/netty/handler/codec/http/websocketx/WebSocketProtocolHandlerTest.java
{ "start": 1543, "end": 1617 }
class ____ in {@link WebSocketClientProtocolHandler}. */ public
functionality
java
apache__flink
flink-runtime/src/test/java/org/apache/flink/streaming/api/operators/StreamSinkOperatorTest.java
{ "start": 1362, "end": 3180 }
class ____ { /** * Verify that we can correctly query watermark, processing time and the timestamp from the * context. */ @Test void testTimeQuerying() throws Exception { BufferingQueryingSink<String> bufferingSink = new BufferingQueryingSink<>(); StreamSink<String> operator = new StreamSink<>(bufferingSink); OneInputStreamOperatorTestHarness<String, Object> testHarness = new OneInputStreamOperatorTestHarness<>(operator); testHarness.setup(); testHarness.open(); testHarness.processWatermark(new Watermark(17)); testHarness.setProcessingTime(12); testHarness.processElement(new StreamRecord<>("Hello", 12L)); testHarness.processWatermark(new Watermark(42)); testHarness.setProcessingTime(15); testHarness.processElement(new StreamRecord<>("Ciao", 13L)); testHarness.processWatermark(new Watermark(42)); testHarness.setProcessingTime(15); testHarness.processElement(new StreamRecord<>("Ciao")); assertThat(bufferingSink.data).hasSize(3); assertThat(bufferingSink.data) .contains( new Tuple4<>(17L, 12L, 12L, "Hello"), new Tuple4<>(42L, 15L, 13L, "Ciao"), new Tuple4<>(42L, 15L, null, "Ciao")); assertThat(bufferingSink.watermarks).hasSize(3); assertThat(bufferingSink.watermarks) .contains( new org.apache.flink.api.common.eventtime.Watermark(17L), new org.apache.flink.api.common.eventtime.Watermark(42L), new org.apache.flink.api.common.eventtime.Watermark(42L)); testHarness.close(); } private static
StreamSinkOperatorTest
java
apache__kafka
streams/src/test/java/org/apache/kafka/streams/kstream/internals/AbstractStreamTest.java
{ "start": 3935, "end": 4621 }
class ____<K, V> extends AbstractStream<K, V> { ExtendedKStream(final KStream<K, V> stream) { super((KStreamImpl<K, V>) stream); } KStream<K, V> randomFilter() { final String name = builder.newProcessorName("RANDOM-FILTER-"); final ProcessorGraphNode<K, V> processorNode = new ProcessorGraphNode<>( name, new ProcessorParameters<>(new ExtendedKStreamDummy<>(), name)); builder.addGraphNode(this.graphNode, processorNode); return new KStreamImpl<>(name, null, null, subTopologySourceNodes, false, processorNode, builder); } } private static
ExtendedKStream
java
apache__flink
flink-python/src/main/java/org/apache/flink/streaming/api/runners/python/beam/state/AbstractBeamStateHandler.java
{ "start": 1044, "end": 1765 }
class ____<S> implements BeamStateHandler<S> { public BeamFnApi.StateResponse.Builder handle(BeamFnApi.StateRequest request, S state) throws Exception { switch (request.getRequestCase()) { case GET: return handleGet(request, state); case APPEND: return handleAppend(request, state); case CLEAR: return handleClear(request, state); default: throw new RuntimeException( String.format( "Unsupported request type %s for user state.", request.getRequestCase())); } } }
AbstractBeamStateHandler
java
elastic__elasticsearch
server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java
{ "start": 99351, "end": 99421 }
class ____ behaviour of original {@link Translog} */ static
mimic
java
elastic__elasticsearch
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfig.java
{ "start": 9037, "end": 9352 }
enum ____ { MAX, MEAN; public static SpanScoreFunction fromString(String value) { return valueOf(value.toUpperCase(Locale.ROOT)); } @Override public String toString() { return name().toLowerCase(Locale.ROOT); } } }
SpanScoreFunction
java
apache__hadoop
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBooleanWritable.java
{ "start": 1090, "end": 3468 }
class ____ { @Test public void testCompareUnequalWritables() throws Exception { DataOutputBuffer bTrue = writeWritable(new BooleanWritable(true)); DataOutputBuffer bFalse = writeWritable(new BooleanWritable(false)); WritableComparator writableComparator = WritableComparator.get(BooleanWritable.class); assertEquals(0, compare(writableComparator, bTrue, bTrue)); assertEquals(0, compare(writableComparator, bFalse, bFalse)); assertEquals(1, compare(writableComparator, bTrue, bFalse)); assertEquals(-1, compare(writableComparator, bFalse, bTrue)); } private int compare(WritableComparator writableComparator, DataOutputBuffer buf1, DataOutputBuffer buf2) { return writableComparator.compare(buf1.getData(), 0, buf1.size(), buf2.getData(), 0, buf2.size()); } protected DataOutputBuffer writeWritable(Writable writable) throws IOException { DataOutputBuffer out = new DataOutputBuffer(1024); writable.write(out); out.flush(); return out; } /** * test {@link BooleanWritable} methods hashCode(), equals(), compareTo() */ @Test public void testCommonMethods() { assertTrue(newInstance(true).equals(newInstance(true)), "testCommonMethods1 error !!!"); assertTrue(newInstance(false).equals(newInstance(false)), "testCommonMethods2 error !!!"); assertFalse(newInstance(false).equals(newInstance(true)), "testCommonMethods3 error !!!"); assertTrue(checkHashCode(newInstance(true), newInstance(true)), "testCommonMethods4 error !!!"); assertFalse(checkHashCode(newInstance(true), newInstance(false)), "testCommonMethods5 error !!! "); assertTrue(newInstance(true).compareTo(newInstance(false)) > 0, "testCommonMethods6 error !!!" ); assertTrue(newInstance(false).compareTo(newInstance(true)) < 0, "testCommonMethods7 error !!!" ); assertTrue(newInstance(false).compareTo(newInstance(false)) == 0, "testCommonMethods8 error !!!" 
); assertEquals("true", newInstance(true).toString(), "testCommonMethods9 error !!!"); } private boolean checkHashCode(BooleanWritable f, BooleanWritable s) { return f.hashCode() == s.hashCode(); } private static BooleanWritable newInstance(boolean flag) { return new BooleanWritable(flag); } }
TestBooleanWritable
java
elastic__elasticsearch
server/src/test/java/org/elasticsearch/tasks/TaskManagerTests.java
{ "start": 22661, "end": 23428 }
class ____ extends AbstractTransportRequest { private final String requestId; CancellableRequest(String requestId) { this.requestId = requestId; } @Override public Task createTask(long id, String type, String action, TaskId parentTaskId, Map<String, String> headers) { return new CancellableTask(id, type, action, "request-" + requestId, parentTaskId, headers) { @Override public boolean shouldCancelChildrenOnCancellation() { return false; } @Override public String toString() { return getDescription(); } }; } } static
CancellableRequest
java
apache__flink
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/aggfunctions/MinWithRetractAggFunctionTest.java
{ "start": 2811, "end": 3525 }
class ____ extends NumberMinWithRetractAggFunctionTestBase<Byte> { @Override protected Byte getMinValue() { return Byte.MIN_VALUE + 1; } @Override protected Byte getMaxValue() { return Byte.MAX_VALUE - 1; } @Override protected Byte getValue(String v) { return Byte.valueOf(v); } @Override protected AggregateFunction<Byte, MinWithRetractAccumulator<Byte>> getAggregator() { return new MinWithRetractAggFunction<>(DataTypes.TINYINT().getLogicalType()); } } /** Test for {@link SmallIntType}. */ @Nested final
ByteMinWithRetractAggFunctionTest
java
apache__kafka
metadata/src/main/java/org/apache/kafka/controller/QuorumController.java
{ "start": 22936, "end": 26195 }
class ____ implements PeriodicTaskControlManager.QueueAccessor { @Override public void scheduleDeferred( String tag, long deadlineNs, Supplier<ControllerResult<Void>> op ) { EnumSet<ControllerOperationFlag> flags = EnumSet.of(DOES_NOT_UPDATE_QUEUE_TIME); queue.scheduleDeferred(tag, new EarliestDeadlineFunction(deadlineNs), new ControllerWriteEvent<>(tag, op::get, flags)); } @Override public void cancelDeferred(String tag) { queue.cancelDeferred(tag); } } private OptionalInt latestController() { return raftClient.leaderAndEpoch().leaderId(); } private void handleEventEnd(String name, long startProcessingTimeNs) { long endProcessingTime = time.nanoseconds(); long deltaNs = endProcessingTime - startProcessingTimeNs; log.debug("Processed {} in {} us", name, MICROSECONDS.convert(deltaNs, NANOSECONDS)); performanceMonitor.observeEvent(name, deltaNs); controllerMetrics.updateEventQueueProcessingTime(NANOSECONDS.toMillis(deltaNs)); } private Throwable handleEventException( String name, OptionalLong startProcessingTimeNs, Throwable exception ) { OptionalLong deltaUs; if (startProcessingTimeNs.isPresent()) { long endProcessingTime = time.nanoseconds(); long deltaNs = endProcessingTime - startProcessingTimeNs.getAsLong(); performanceMonitor.observeEvent(name, deltaNs); controllerMetrics.updateEventQueueProcessingTime(NANOSECONDS.toMillis(deltaNs)); deltaUs = OptionalLong.of(MICROSECONDS.convert(deltaNs, NANOSECONDS)); } else { deltaUs = OptionalLong.empty(); } EventHandlerExceptionInfo info = EventHandlerExceptionInfo. 
fromInternal(exception, this::latestController); int epoch = curClaimEpoch; if (epoch == -1) { epoch = offsetControl.lastCommittedEpoch(); } String failureMessage = info.failureMessage(epoch, deltaUs, isActiveController(), offsetControl.lastCommittedOffset()); if (info.isTimeoutException() && (deltaUs.isEmpty())) { controllerMetrics.incrementOperationsTimedOut(); } if (info.isFault()) { nonFatalFaultHandler.handleFault(name + ": " + failureMessage, exception); } else { log.info("{}: {}", name, failureMessage); } if (info.causesFailover() && isActiveController()) { renounce(); } return info.effectiveExternalException(); } private long updateEventStartMetricsAndGetTime(OptionalLong eventCreatedTimeNs) { long now = time.nanoseconds(); controllerMetrics.incrementOperationsStarted(); if (eventCreatedTimeNs.isPresent()) { controllerMetrics.updateEventQueueTime(NANOSECONDS.toMillis(now - eventCreatedTimeNs.getAsLong())); } return now; } /** * A controller event for handling internal state changes, such as Raft inputs. */
PeriodicTaskControlManagerQueueAccessor
java
spring-cloud__spring-cloud-gateway
spring-cloud-gateway-server-webflux/src/main/java/org/springframework/cloud/gateway/support/ShortcutConfigurable.java
{ "start": 6622, "end": 7184 }
class ____ extends BeanFactoryResolver { public GatewayBeanFactoryResolver(BeanFactory beanFactory) { super(beanFactory); } @Override public Object resolve(EvaluationContext context, String beanName) throws AccessException { if (SYSTEM_ENVIRONMENT_BEAN_NAME.equals(beanName) || SYSTEM_PROPERTIES_BEAN_NAME.equals(beanName)) { throw new AccessException(beanName + " is not accessible when spring.cloud.gateway.restrictive-property-accessor.enabled=true"); } return super.resolve(context, beanName); } }
GatewayBeanFactoryResolver
java
apache__dubbo
dubbo-plugin/dubbo-rest-jaxrs/src/test/java/org/apache/dubbo/rpc/protocol/tri/rest/support/jaxrs/compatible/intercept/DynamicTraceInterceptor.java
{ "start": 1283, "end": 1928 }
class ____ implements ReaderInterceptor, WriterInterceptor { public DynamicTraceInterceptor() {} @Override public Object aroundReadFrom(ReaderInterceptorContext readerInterceptorContext) throws IOException, WebApplicationException { return readerInterceptorContext.proceed(); } @Override public void aroundWriteTo(WriterInterceptorContext writerInterceptorContext) throws IOException, WebApplicationException { writerInterceptorContext.getOutputStream().write("intercept".getBytes(StandardCharsets.UTF_8)); writerInterceptorContext.proceed(); } }
DynamicTraceInterceptor
java
elastic__elasticsearch
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ai21/request/Ai21ChatCompletionRequest.java
{ "start": 1087, "end": 1260 }
class ____ responsible for creating a request to the AI21 chat completion model. * It constructs an HTTP POST request with the necessary headers and body content. */ public
is
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java
{ "start": 9283, "end": 9562 }
interface ____ { void sync( ResyncReplicationRequest request, Task parentTask, String primaryAllocationId, long primaryTerm, ActionListener<ResyncReplicationResponse> listener ); } static
SyncAction
java
google__guice
core/src/com/google/inject/util/Modules.java
{ "start": 2032, "end": 2153 }
class ____ { private Modules() {} public static final Module EMPTY_MODULE = new EmptyModule(); private static
Modules
java
apache__flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/inference/strategies/ObjectOfInputTypeStrategy.java
{ "start": 7070, "end": 7283 }
class ____>, [<key>, <value> [, <key>, <value> , ...]] ) final List<Signature.Argument> arguments = new ArrayList<>(); // Class name (required) final Argument classArgument = Argument.of("
name
java
alibaba__druid
core/src/main/java/com/alibaba/druid/support/http/util/IPAddress.java
{ "start": 3195, "end": 6343 }
class ____ IP address, otherwise * returne <code>false</code>. */ public final boolean isClassC() { return (ipAddress & 0x00000007) == 3; } // ------------------------------------------------------------------------- /** * Convert a decimal-dotted notation representation of an IP address into an 32 bits interger value. * * @param ipAddressStr Decimal-dotted notation (xxx.xxx.xxx.xxx) of the IP address. * @return Return the 32 bits integer representation of the IP address. * @throws InvalidIPAddressException Throws this exception if the specified IP address is not compliant to the * decimal-dotted notation xxx.xxx.xxx.xxx. */ final int parseIPAddress(String ipAddressStr) { int result = 0; if (ipAddressStr == null) { throw new IllegalArgumentException(); } try { String tmp = ipAddressStr; // get the 3 first numbers int offset = 0; for (int i = 0; i < 3; i++) { // get the position of the first dot int index = tmp.indexOf('.'); // if there is not a dot then the ip string representation is // not compliant to the decimal-dotted notation. if (index != -1) { // get the number before the dot and convert it into // an integer. String numberStr = tmp.substring(0, index); int number = Integer.parseInt(numberStr); if ((number < 0) || (number > 255)) { throw new IllegalArgumentException("Invalid IP Address [" + ipAddressStr + "]"); } result += number << offset; offset += 8; tmp = tmp.substring(index + 1); } else { throw new IllegalArgumentException("Invalid IP Address [" + ipAddressStr + "]"); } } // the remaining part of the string should be the last number. 
if (tmp.length() > 0) { int number = Integer.parseInt(tmp); if ((number < 0) || (number > 255)) { throw new IllegalArgumentException("Invalid IP Address [" + ipAddressStr + "]"); } result += number << offset; ipAddress = result; } else { throw new IllegalArgumentException("Invalid IP Address [" + ipAddressStr + "]"); } } catch (NoSuchElementException ex) { throw new IllegalArgumentException("Invalid IP Address [" + ipAddressStr + "]", ex); } catch (NumberFormatException ex) { throw new IllegalArgumentException("Invalid IP Address [" + ipAddressStr + "]", ex); } return result; } public int hashCode() { return this.ipAddress; } public boolean equals(Object another) { return another instanceof IPAddress && ipAddress == ((IPAddress) another).ipAddress; } }
C
java
junit-team__junit5
documentation/src/test/java/example/ParameterizedMigrationDemo.java
{ "start": 867, "end": 1673 }
class ____ { @Parameterized.Parameters public static Iterable<Object[]> data() { return Arrays.asList(new Object[][] { { 1, "foo" }, { 2, "bar" } }); } // end::before[] @SuppressWarnings("DefaultAnnotationParam") // tag::before[] @Parameterized.Parameter(0) public int number; @Parameterized.Parameter(1) public String text; @Parameterized.BeforeParam public static void before(int number, String text) { } @Parameterized.AfterParam public static void after() { } @org.junit.Test public void someTest() { } @org.junit.Test public void anotherTest() { } } // end::before[] @SuppressWarnings("JUnitMalformedDeclaration") // tag::after[] @ParameterizedClass @MethodSource("data") // end::after[] static // tag::after[]
JUnit4ParameterizedClassTests
java
apache__kafka
clients/src/main/java/org/apache/kafka/common/security/kerberos/KerberosLogin.java
{ "start": 1958, "end": 18953 }
class ____ extends AbstractLogin { private static final Logger log = LoggerFactory.getLogger(KerberosLogin.class); private static final Random RNG = new Random(); private final Time time = Time.SYSTEM; private Thread t; private boolean isKrbTicket; private boolean isUsingTicketCache; private String principal; // LoginThread will sleep until 80% of time from last refresh to // ticket's expiry has been reached, at which time it will wake // and try to renew the ticket. private double ticketRenewWindowFactor; /** * Percentage of random jitter added to the renewal time */ private double ticketRenewJitter; // Regardless of ticketRenewWindowFactor setting above and the ticket expiry time, // thread will not sleep between refresh attempts any less than 1 minute (60*1000 milliseconds = 1 minute). // Change the '1' to e.g. 5, to change this to 5 minutes. private long minTimeBeforeRelogin; private String kinitCmd; private volatile Subject subject; private LoginContext loginContext; private String serviceName; private long lastLogin; @Override public void configure(Map<String, ?> configs, String contextName, Configuration configuration, AuthenticateCallbackHandler callbackHandler) { super.configure(configs, contextName, configuration, callbackHandler); this.ticketRenewWindowFactor = (Double) configs.get(SaslConfigs.SASL_KERBEROS_TICKET_RENEW_WINDOW_FACTOR); this.ticketRenewJitter = (Double) configs.get(SaslConfigs.SASL_KERBEROS_TICKET_RENEW_JITTER); this.minTimeBeforeRelogin = (Long) configs.get(SaslConfigs.SASL_KERBEROS_MIN_TIME_BEFORE_RELOGIN); this.kinitCmd = (String) configs.get(SaslConfigs.SASL_KERBEROS_KINIT_CMD); this.serviceName = getServiceName(configs, contextName, configuration); } /** * Performs login for each login module specified for the login context of this instance and starts the thread used * to periodically re-login to the Kerberos Ticket Granting Server. 
*/ @Override public LoginContext login() throws LoginException { this.lastLogin = currentElapsedTime(); loginContext = super.login(); subject = loginContext.getSubject(); isKrbTicket = !subject.getPrivateCredentials(KerberosTicket.class).isEmpty(); AppConfigurationEntry[] entries = configuration().getAppConfigurationEntry(contextName()); if (entries.length == 0) { isUsingTicketCache = false; principal = null; } else { // there will only be a single entry AppConfigurationEntry entry = entries[0]; if (entry.getOptions().get("useTicketCache") != null) { String val = (String) entry.getOptions().get("useTicketCache"); isUsingTicketCache = val.equals("true"); } else isUsingTicketCache = false; if (entry.getOptions().get("principal") != null) principal = (String) entry.getOptions().get("principal"); else principal = null; } if (!isKrbTicket) { log.debug("[Principal={}]: It is not a Kerberos ticket", principal); t = null; // if no TGT, do not bother with ticket management. return loginContext; } log.debug("[Principal={}]: It is a Kerberos ticket", principal); // Refresh the Ticket Granting Ticket (TGT) periodically. How often to refresh is determined by the // TGT's existing expiry date and the configured minTimeBeforeRelogin. For testing and development, // you can decrease the interval of expiration of tickets (for example, to 3 minutes) by running: // "modprinc -maxlife 3mins <principal>" in kadmin. t = KafkaThread.daemon(String.format("kafka-kerberos-refresh-thread-%s", principal), () -> { log.info("[Principal={}]: TGT refresh thread started.", principal); while (true) { // renewal thread's main loop. if it exits from here, thread will exit. 
KerberosTicket tgt = getTGT(); long now = currentWallTime(); long nextRefresh; Date nextRefreshDate; if (tgt == null) { nextRefresh = now + minTimeBeforeRelogin; nextRefreshDate = new Date(nextRefresh); log.warn("[Principal={}]: No TGT found: will try again at {}", principal, nextRefreshDate); } else { nextRefresh = getRefreshTime(tgt); long expiry = tgt.getEndTime().getTime(); Date expiryDate = new Date(expiry); if (isUsingTicketCache && tgt.getRenewTill() != null && tgt.getRenewTill().getTime() < expiry) { log.warn("The TGT cannot be renewed beyond the next expiry date: {}." + "This process will not be able to authenticate new SASL connections after that " + "time (for example, it will not be able to authenticate a new connection with a Kafka " + "Broker). Ask your system administrator to either increase the " + "'renew until' time by doing : 'modprinc -maxrenewlife {} ' within " + "kadmin, or instead, to generate a keytab for {}. Because the TGT's " + "expiry cannot be further extended by refreshing, exiting refresh thread now.", expiryDate, principal, principal); return; } // determine how long to sleep from looking at ticket's expiry. // We should not allow the ticket to expire, but we should take into consideration // minTimeBeforeRelogin. Will not sleep less than minTimeBeforeRelogin, unless doing so // would cause ticket expiration. if ((nextRefresh > expiry) || (minTimeBeforeRelogin > expiry - now)) { // expiry is before next scheduled refresh. log.info("[Principal={}]: Refreshing now because expiry is before next scheduled refresh time.", principal); nextRefresh = now; } else { if (nextRefresh - now < minTimeBeforeRelogin) { // next scheduled refresh is sooner than (now + MIN_TIME_BEFORE_LOGIN). 
Date until = new Date(nextRefresh); Date newUntil = new Date(now + minTimeBeforeRelogin); log.warn("[Principal={}]: TGT refresh thread time adjusted from {} to {} since the former is sooner " + "than the minimum refresh interval ({} seconds) from now.", principal, until, newUntil, minTimeBeforeRelogin / 1000); } nextRefresh = Math.max(nextRefresh, now + minTimeBeforeRelogin); } nextRefreshDate = new Date(nextRefresh); if (nextRefresh > expiry) { log.error("[Principal={}]: Next refresh: {} is later than expiry {}. This may indicate a clock skew problem." + "Check that this host and the KDC hosts' clocks are in sync. Exiting refresh thread.", principal, nextRefreshDate, expiryDate); return; } } if (now < nextRefresh) { Date until = new Date(nextRefresh); log.info("[Principal={}]: TGT refresh sleeping until: {}", principal, until); try { Thread.sleep(nextRefresh - now); } catch (InterruptedException ie) { log.warn("[Principal={}]: TGT renewal thread has been interrupted and will exit.", principal); return; } } else { log.error("[Principal={}]: NextRefresh: {} is in the past: exiting refresh thread. Check" + " clock sync between this host and KDC - (KDC's clock is likely ahead of this host)." + " Manual intervention will be required for this client to successfully authenticate." 
+ " Exiting refresh thread.", principal, nextRefreshDate); return; } if (isUsingTicketCache) { String kinitArgs = "-R"; int retry = 1; while (retry >= 0) { try { log.debug("[Principal={}]: Running ticket cache refresh command: {} {}", principal, kinitCmd, kinitArgs); Shell.execCommand(kinitCmd, kinitArgs); break; } catch (Exception e) { if (retry > 0) { log.warn("[Principal={}]: Error when trying to renew with TicketCache, but will retry ", principal, e); --retry; // sleep for 10 seconds try { Thread.sleep(10 * 1000); } catch (InterruptedException ie) { log.error("[Principal={}]: Interrupted while renewing TGT, exiting Login thread", principal); return; } } else { log.warn("[Principal={}]: Could not renew TGT due to problem running shell command: '{} {}'. " + "Exiting refresh thread.", principal, kinitCmd, kinitArgs, e); return; } } } } try { int retry = 1; while (retry >= 0) { try { reLogin(); break; } catch (LoginException le) { if (retry > 0) { log.warn("[Principal={}]: Error when trying to re-Login, but will retry ", principal, le); --retry; // sleep for 10 seconds. 
try { Thread.sleep(10 * 1000); } catch (InterruptedException e) { log.error("[Principal={}]: Interrupted during login retry after LoginException:", principal, le); throw le; } } else { log.error("[Principal={}]: Could not refresh TGT.", principal, le); } } } } catch (LoginException le) { log.error("[Principal={}]: Failed to refresh TGT: refresh thread exiting now.", principal, le); return; } } }); t.start(); return loginContext; } @Override public void close() { if ((t != null) && (t.isAlive())) { t.interrupt(); try { t.join(); } catch (InterruptedException e) { log.warn("[Principal={}]: Error while waiting for Login thread to shutdown.", principal, e); Thread.currentThread().interrupt(); } } } @Override public Subject subject() { return subject; } @Override public String serviceName() { return serviceName; } private static String getServiceName(Map<String, ?> configs, String contextName, Configuration configuration) { List<AppConfigurationEntry> configEntries = Arrays.asList(configuration.getAppConfigurationEntry(contextName)); String jaasServiceName = JaasContext.configEntryOption(configEntries, JaasUtils.SERVICE_NAME, null); String configServiceName = (String) configs.get(SaslConfigs.SASL_KERBEROS_SERVICE_NAME); if (jaasServiceName != null && configServiceName != null && !jaasServiceName.equals(configServiceName)) { String message = String.format("Conflicting serviceName values found in JAAS and Kafka configs " + "value in JAAS file %s, value in Kafka config %s", jaasServiceName, configServiceName); throw new IllegalArgumentException(message); } if (jaasServiceName != null) return jaasServiceName; if (configServiceName != null) return configServiceName; throw new IllegalArgumentException("No serviceName defined in either JAAS or Kafka config"); } private long getRefreshTime(KerberosTicket tgt) { long start = tgt.getStartTime().getTime(); long expires = tgt.getEndTime().getTime(); log.info("[Principal={}]: TGT valid starting at: {}", principal, 
tgt.getStartTime()); log.info("[Principal={}]: TGT expires: {}", principal, tgt.getEndTime()); long proposedRefresh = start + (long) ((expires - start) * (ticketRenewWindowFactor + (ticketRenewJitter * RNG.nextDouble()))); if (proposedRefresh > expires) // proposedRefresh is too far in the future: it's after ticket expires: simply return now. return currentWallTime(); else return proposedRefresh; } private KerberosTicket getTGT() { Set<KerberosTicket> tickets = subject.getPrivateCredentials(KerberosTicket.class); for (KerberosTicket ticket : tickets) { KerberosPrincipal server = ticket.getServer(); if (server.getName().equals("krbtgt/" + server.getRealm() + "@" + server.getRealm())) { log.debug("Found TGT with client principal '{}' and server principal '{}'.", ticket.getClient().getName(), ticket.getServer().getName()); return ticket; } } return null; } private boolean hasSufficientTimeElapsed() { long now = currentElapsedTime(); if (now - lastLogin < minTimeBeforeRelogin) { log.warn("[Principal={}]: Not attempting to re-login since the last re-login was attempted less than {} seconds before.", principal, minTimeBeforeRelogin / 1000); return false; } return true; } /** * Re-login a principal. This method assumes that {@link #login()} has happened already. * @throws javax.security.auth.login.LoginException on a failure */ protected void reLogin() throws LoginException { if (!isKrbTicket) { return; } if (loginContext == null) { throw new LoginException("Login must be done first"); } if (!hasSufficientTimeElapsed()) { return; } synchronized (KerberosLogin.class) { log.info("Initiating logout for {}", principal); // register most recent relogin attempt lastLogin = currentElapsedTime(); //clear up the kerberos state. But the tokens are not cleared! As per //the Java kerberos login module code, only the kerberos credentials //are cleared. If previous logout succeeded but login failed, we shouldn't //logout again since duplicate logout causes NPE from Java 9 onwards. 
if (subject != null && !subject.getPrincipals().isEmpty()) { logout(); } //login and also update the subject field of this instance to //have the new credentials (pass it to the LoginContext constructor) loginContext = new LoginContext(contextName(), subject, null, configuration()); log.info("Initiating re-login for {}", principal); login(loginContext); } } // Visibility to override for testing protected void login(LoginContext loginContext) throws LoginException { loginContext.login(); } // Visibility to override for testing protected void logout() throws LoginException { loginContext.logout(); } private long currentElapsedTime() { return time.hiResClockMs(); } private long currentWallTime() { return time.milliseconds(); } }
KerberosLogin
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/index/shard/GlobalCheckpointListeners.java
{ "start": 1605, "end": 1828 }
class ____ implements Closeable { /** * A global checkpoint listener consisting of a callback that is notified when the global checkpoint is updated or the shard is closed. */ public
GlobalCheckpointListeners
java
eclipse-vertx__vert.x
vertx-core/src/main/java/io/vertx/core/spi/metrics/TransportMetrics.java
{ "start": 811, "end": 1792 }
interface ____<S> extends NetworkMetrics<S> { /** * @return the transport type, {@code tcp} or {@code quic} */ String type(); /** * Called when a client has connected, which is applicable for connections.<p/> * * The remote name of the client is a best effort to provide the name of the remote host, i.e. if the name * is specified at creation time, this name will be used otherwise it will be the remote address. * * @param remoteAddress the remote address of the client * @param remoteName the remote name of the client * @return the socket metric */ default S connected(SocketAddress remoteAddress, String remoteName) { return null; } /** * Called when a client has disconnected, which is applicable for connections. * * @param socketMetric the socket metric * @param remoteAddress the remote address of the client */ default void disconnected(S socketMetric, SocketAddress remoteAddress) { } }
TransportMetrics
java
spring-projects__spring-framework
spring-context/src/main/java/org/springframework/scheduling/annotation/AbstractAsyncConfiguration.java
{ "start": 1444, "end": 1719 }
class ____ common structure for enabling * Spring's asynchronous method execution capability. * * @author Chris Beams * @author Juergen Hoeller * @author Stephane Nicoll * @since 3.1 * @see EnableAsync */ @Configuration(proxyBeanMethods = false) public abstract
providing
java
quarkusio__quarkus
extensions/hibernate-search-orm-elasticsearch/deployment/src/test/java/io/quarkus/hibernate/search/orm/elasticsearch/test/configuration/NoConfigNoIndexedEntityNamedPuTest.java
{ "start": 508, "end": 1312 }
class ____ { @RegisterExtension static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer( () -> ShrinkWrap.create(JavaArchive.class)) .withConfigurationResource("application-nohsearchconfig-named-pu.properties"); // When having no indexed entities, no configuration, no datasource, // as long as the Hibernate Search beans are not injected anywhere, // we should still be able to start the application. @Test public void testBootSucceedsButHibernateSearchDeactivated() { // ... but Hibernate Search's beans should not be available. assertThat(Arc.container().instance(SearchMapping.class, new PersistenceUnit.PersistenceUnitLiteral("PU1")).get()) .isNull(); } }
NoConfigNoIndexedEntityNamedPuTest
java
apache__maven
its/core-it-support/core-it-plugins/maven-it-plugin-dependency-resolution/src/main/java/org/apache/maven/plugin/coreit/AggregateTestMojo.java
{ "start": 2821, "end": 4189 }
class ____ checksums will not be calculated. */ @Parameter(property = "depres.testClassPathChecksums") private String testClassPathChecksums; /** * The Maven projects in the reactor. */ @Parameter(defaultValue = "${reactorProjects}", readonly = true) private List<MavenProject> reactorProjects; /** * Runs this mojo. * * @throws MojoExecutionException If the output file could not be created or any dependency could not be resolved. */ public void execute() throws MojoExecutionException { try { for (MavenProject project : reactorProjects) { writeArtifacts(filter(projectArtifacts, project), project.getArtifacts()); writeClassPath(filter(testClassPath, project), project.getTestClasspathElements()); writeClassPathChecksums(filter(testClassPathChecksums, project), project.getTestClasspathElements()); } } catch (DependencyResolutionRequiredException e) { throw new MojoExecutionException("Failed to resolve dependencies", e); } } private String filter(String filename, MavenProject project) { String result = filename; if (filename != null) { result = result.replaceAll("@artifactId@", project.getArtifactId()); } return result; } }
path
java
assertj__assertj-core
assertj-core/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_usingDefaultComparator_Test.java
{ "start": 1201, "end": 1894 }
class ____ extends ObjectArrayAssertBaseTest { private Comparator<Object[]> comparator = alwaysEqual(); private ObjectArrays arraysBefore; @BeforeEach void before() { arraysBefore = getArrays(assertions); assertions.usingComparator(comparator); } @Override protected ObjectArrayAssert<Object> invoke_api_method() { return assertions.usingDefaultComparator(); } @Override protected void verify_internal_effects() { assertThat(getArrays(assertions).getComparator()).isNull(); assertThat(getObjects(assertions)).isSameAs(Objects.instance()); assertThat(getArrays(assertions)).isSameAs(arraysBefore); } }
ObjectArrayAssert_usingDefaultComparator_Test
java
spring-projects__spring-boot
module/spring-boot-jersey/src/test/java/org/springframework/boot/jersey/autoconfigure/metrics/JerseyServerMetricsAutoConfigurationTests.java
{ "start": 5850, "end": 6040 }
class ____ { @Bean MeterObservationHandler<Context> meterObservationHandler(MeterRegistry registry) { return new DefaultMeterObservationHandler(registry); } } }
MetricsConfiguration
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/JUnit4SetUpNotRunTest.java
{ "start": 7887, "end": 8280 }
class ____ { @Before public void setUp() {} } @Test public void noBeforeOnClasspath() { compilationHelper .addSourceLines( "Test.java", "import org.junit.runner.RunWith;", "import org.junit.runners.JUnit4;", "import " + SuperTest.class.getCanonicalName() + ";", "@RunWith(JUnit4.class)", "
SuperTest
java
quarkusio__quarkus
extensions/resteasy-classic/resteasy-client-jsonb/deployment/src/test/java/io/quarkus/restclient/jsonb/deployment/ClientResource.java
{ "start": 303, "end": 761 }
class ____ { @Inject @RestClient RestInterface restInterface; @GET @Path("/hello") public String hello() { DateDto dateDto = restInterface.get(); ZonedDateTime zonedDateTime = dateDto.getDate(); if (zonedDateTime.getMonth().equals(Month.NOVEMBER) && zonedDateTime.getZone().equals(ZoneId.of("Europe/Paris"))) { return "OK"; } return "INVALID"; } }
ClientResource
java
google__error-prone
core/src/main/java/com/google/errorprone/bugpatterns/DoNotCallChecker.java
{ "start": 8338, "end": 8713 }
class ____ the execution point" + " represented by this StackWalker using getCallerClass") .put( instanceMethod().onExactClass("java.lang.StackWalker$StackFrame").named("getClass"), "Calling getClass on StackFrame returns the Class object for StackFrame, you probably" + " meant to retrieve the
containing
java
apache__hadoop
hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestAsyncRpcProtocolPBUtil.java
{ "start": 2010, "end": 4638 }
class ____ { private static final Logger LOG = LoggerFactory.getLogger(TestAsyncRpcProtocolPBUtil.class); private static final int SERVER_PROCESS_COST_MS = 100; private TestClientProtocolTranslatorPB clientPB; private Server rpcServer; @BeforeEach public void setUp() throws IOException { AsyncRpcProtocolPBUtil.setAsyncResponderExecutor(ForkJoinPool.commonPool()); Configuration conf = new Configuration(); RPC.setProtocolEngine(conf, TestRpcBase.TestRpcService.class, ProtobufRpcEngine2.class); // Create server side implementation TestClientProtocolServerSideTranslatorPB serverImpl = new TestClientProtocolServerSideTranslatorPB(SERVER_PROCESS_COST_MS); BlockingService service = TestRpcServiceProtos.TestProtobufRpcProto .newReflectiveBlockingService(serverImpl); // start the IPC server rpcServer = new RPC.Builder(conf) .setProtocol(TestRpcBase.TestRpcService.class) .setInstance(service).setBindAddress("0.0.0.0") .setPort(0).setNumHandlers(1).setVerbose(true).build(); rpcServer.start(); InetSocketAddress addr = NetUtils.getConnectAddress(rpcServer); TestRpcBase.TestRpcService proxy = RPC.getProxy(TestRpcBase.TestRpcService.class, TestRPC.TestProtocol.versionID, addr, conf); clientPB = new TestClientProtocolTranslatorPB(proxy); Client.setAsynchronousMode(true); clientPB.ping(); } @AfterEach public void clear() { if (clientPB != null) { clientPB.close(); } if (rpcServer != null) { rpcServer.stop(); } } @Test public void testAsyncIpcClient() throws Exception { Client.setAsynchronousMode(true); long start = Time.monotonicNow(); clientPB.add(1, 2); long cost = Time.monotonicNow() - start; LOG.info("rpc client add {} {}, cost: {}ms", 1, 2, cost); Integer res = syncReturn(Integer.class); checkResult(3, res, cost); start = Time.monotonicNow(); clientPB.echo("test echo!"); cost = Time.monotonicNow() - start; LOG.info("rpc client echo {}, cost: {}ms", "test echo!", cost); String value = syncReturn(String.class); checkResult("test echo!", value, cost); start = Time.monotonicNow(); 
clientPB.error(); LOG.info("rpc client error, cost: {}ms", Time.monotonicNow() - start); LambdaTestUtils.intercept(RemoteException.class, "test!", () -> AsyncUtil.syncReturn(String.class)); } private void checkResult(Object expected, Object actual, long cost) { assertTrue(cost < SERVER_PROCESS_COST_MS); assertEquals(expected, actual); } }
TestAsyncRpcProtocolPBUtil
java
elastic__elasticsearch
server/src/test/java/org/elasticsearch/health/GetHealthResponseTests.java
{ "start": 1131, "end": 8202 }
class ____ extends ESTestCase { public void testToXContent() throws IOException { List<HealthIndicatorResult> indicatorResults = new ArrayList<>(2); indicatorResults.add(createRandomIndicatorResult()); indicatorResults.add(createRandomIndicatorResult()); Response response = new Response(ClusterName.DEFAULT, indicatorResults, true); XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); response.toXContentChunked(EMPTY_PARAMS).forEachRemaining(xcontent -> { try { xcontent.toXContent(builder, EMPTY_PARAMS); } catch (IOException e) { logger.error(e.getMessage(), e); fail(e.getMessage()); } }); Map<String, Object> xContentMap = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2(); assertThat(xContentMap.get("status"), is(response.getStatus().xContentValue())); assertXContentContainsExpectedResult(xContentMap, indicatorResults.get(0).name(), indicatorResults.get(0)); assertXContentContainsExpectedResult(xContentMap, indicatorResults.get(1).name(), indicatorResults.get(1)); } @SuppressWarnings("unchecked") private void assertXContentContainsExpectedResult( Map<String, Object> xContentMap, String indicatorName, HealthIndicatorResult expectedIndicatorResult ) { Map<String, Object> allIndicators = (Map<String, Object>) xContentMap.get("indicators"); Map<String, Object> indicator = (Map<String, Object>) allIndicators.get(indicatorName); List<Map<String, Object>> impacts = (List<Map<String, Object>>) indicator.get("impacts"); List<HealthIndicatorImpact> fromXContentImpacts = new ArrayList<>(impacts.size()); for (Map<String, Object> impact : impacts) { List<ImpactArea> impactAreasList = new ArrayList<>(); List<String> fromXContentImpactAreas = (List<String>) impact.get("impact_areas"); for (String impactArea : fromXContentImpactAreas) { impactAreasList.add(ImpactArea.valueOf(impactArea.toUpperCase(Locale.ROOT))); } String xcontentId = (String) impact.get("id"); fromXContentImpacts.add( new HealthIndicatorImpact( 
indicatorName, // TODO add some methods to handle the id and xcontent id transitions xcontentId.substring(xcontentId.lastIndexOf(":") + 1), (Integer) impact.get("severity"), (String) impact.get("description"), impactAreasList ) ); } assertThat(fromXContentImpacts, is(expectedIndicatorResult.impacts())); List<Map<String, Object>> diagnosisList = (List<Map<String, Object>>) indicator.get("diagnosis"); List<Diagnosis> fromXContentDiagnosis = new ArrayList<>(diagnosisList.size()); for (Map<String, Object> diagnosisMap : diagnosisList) { String xcontentId = (String) diagnosisMap.get("id"); Map<String, Object> affectedResources = (Map<String, Object>) diagnosisMap.get("affected_resources"); Diagnosis.Resource affectedIndices = new Diagnosis.Resource( Diagnosis.Resource.Type.INDEX, (List<String>) affectedResources.get("indices") ); fromXContentDiagnosis.add( new Diagnosis( new Diagnosis.Definition( indicatorName, xcontentId.substring(xcontentId.lastIndexOf(":") + 1), (String) diagnosisMap.get("cause"), (String) diagnosisMap.get("action"), (String) diagnosisMap.get("help_url") ), List.of(affectedIndices) ) ); } assertThat(fromXContentDiagnosis, is(expectedIndicatorResult.diagnosisList())); HealthIndicatorResult fromXContentIndicatorResult = new HealthIndicatorResult( indicatorName, HealthStatus.valueOf(((String) indicator.get("status")).toUpperCase(Locale.ROOT)), (String) indicator.get("symptom"), new SimpleHealthIndicatorDetails((Map<String, Object>) indicator.get("details")), fromXContentImpacts, fromXContentDiagnosis ); assertThat(fromXContentIndicatorResult, is(expectedIndicatorResult)); } private static HealthIndicatorResult createRandomIndicatorResult() { String name = randomAlphaOfLength(10); HealthStatus status = randomFrom(HealthStatus.RED, HealthStatus.YELLOW, HealthStatus.GREEN); String symptom = randomAlphaOfLength(20); Map<String, Object> detailsMap = new HashMap<>(); detailsMap.put(randomAlphaOfLengthBetween(5, 50), randomAlphaOfLengthBetween(5, 50)); 
HealthIndicatorDetails details = new SimpleHealthIndicatorDetails(detailsMap); List<HealthIndicatorImpact> impacts = new ArrayList<>(); String impact1Id = randomAlphaOfLength(30); int impact1Severity = randomIntBetween(1, 5); String impact1Description = randomAlphaOfLength(30); ImpactArea firstImpactArea = randomFrom(ImpactArea.values()); impacts.add(new HealthIndicatorImpact(name, impact1Id, impact1Severity, impact1Description, List.of(firstImpactArea))); String impact2Id = randomAlphaOfLength(30); int impact2Severity = randomIntBetween(1, 5); String impact2Description = randomAlphaOfLength(30); ImpactArea secondImpactArea = randomFrom(ImpactArea.values()); impacts.add(new HealthIndicatorImpact(name, impact2Id, impact2Severity, impact2Description, List.of(secondImpactArea))); List<Diagnosis> diagnosisList = new ArrayList<>(); Diagnosis.Resource resource1 = new Diagnosis.Resource(Diagnosis.Resource.Type.INDEX, List.of(randomAlphaOfLength(10))); Diagnosis diagnosis1 = new Diagnosis( new Diagnosis.Definition( name, randomAlphaOfLength(30), randomAlphaOfLength(50), randomAlphaOfLength(50), randomAlphaOfLength(30) ), List.of(resource1) ); diagnosisList.add(diagnosis1); Diagnosis.Resource resource2 = new Diagnosis.Resource(Diagnosis.Resource.Type.INDEX, List.of(randomAlphaOfLength(10))); Diagnosis diagnosis2 = new Diagnosis( new Diagnosis.Definition( name, randomAlphaOfLength(30), randomAlphaOfLength(50), randomAlphaOfLength(50), randomAlphaOfLength(30) ), List.of(resource2) ); diagnosisList.add(diagnosis2); return new HealthIndicatorResult(name, status, symptom, details, impacts, diagnosisList); } }
GetHealthResponseTests
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/persister/filter/internal/FilterHelper.java
{ "start": 1408, "end": 10166 }
class ____ { private static final Pattern FILTER_PARAMETER_PATTERN = Pattern.compile( ":((\\S+)(\\w+))" ); private final String[] filterNames; private final String[] filterConditions; private final boolean[] filterAutoAliasFlags; private final Map<String, String>[] filterAliasTableMaps; private final List<String>[] parameterNames; private final Map<String, String> tableToEntityName; public FilterHelper(List<FilterConfiguration> filters, SessionFactoryImplementor factory) { this( filters, null, factory ); } /** * The map of defined filters. This is expected to be in format * where the filter names are the map keys, and the defined * conditions are the values. * * @param filters The map of defined filters. * @param factory The session factory */ public FilterHelper(List<FilterConfiguration> filters, Map<String, String> tableToEntityName, SessionFactoryImplementor factory) { final int filterCount = filters.size(); filterNames = new String[filterCount]; filterConditions = new String[filterCount]; filterAutoAliasFlags = new boolean[filterCount]; filterAliasTableMaps = new Map[filterCount]; parameterNames = new List[filterCount]; this.tableToEntityName = tableToEntityName; for ( int i = 0; i < filters.size(); i++ ) { final var filter = filters.get( i ); final String filterName = safeInterning( filter.getName() ); filterNames[i] = filterName; filterConditions[i] = safeInterning( filter.getCondition() ); filterAliasTableMaps[i] = filter.getAliasTableMap( factory ); filterAutoAliasFlags[i] = false; injectAliases( factory, filter, i ); qualifyParameterNames( i, filterName ); } } private void injectAliases(SessionFactoryImplementor factory, FilterConfiguration filter, int filterCount) { if ( ( filterAliasTableMaps[filterCount].isEmpty() || isTableFromPersistentClass( filterAliasTableMaps[filterCount] ) ) && filter.useAutoAliasInjection() ) { final String autoAliasedCondition = Template.renderWhereStringTemplate( filter.getCondition(), MARKER, 
factory.getJdbcServices().getDialect(), factory.getTypeConfiguration() ); filterConditions[filterCount] = safeInterning( autoAliasedCondition ); filterAutoAliasFlags[filterCount] = true; } } /** * Look for parameters in the given condition. For each parameter, we: * <ol> * <li>keep track of the name for later</li> * <li>replace {@code :{param-name}} with {@code :{filter-name}.{param-name}} * in the condition</li> * </ol> */ private void qualifyParameterNames(int filterCount, String filterName) { final List<String> parameterNames = new ArrayList<>(); boolean foundAny = false; final var matcher = FILTER_PARAMETER_PATTERN.matcher( filterConditions[filterCount] ); while ( matcher.find() ) { parameterNames.add( matcher.group(1) ); foundAny = true; } if ( foundAny ) { filterConditions[filterCount] = safeInterning( matcher.replaceAll(":" + filterName + ".$1") ); } this.parameterNames[filterCount] = parameterNames; } private static boolean isTableFromPersistentClass(Map<String, String> aliasTableMap) { return aliasTableMap.size() == 1 && aliasTableMap.containsKey( null ); } public String[] getFilterNames() { return filterNames; } public boolean isAffectedBy(Map<String, Filter> enabledFilters) { return isAffectedBy( enabledFilters, false ); } public boolean isAffectedBy(Map<String, Filter> enabledFilters, boolean onlyApplyForLoadByKey) { for ( String filterName : filterNames ) { final var filter = enabledFilters.get( filterName ); if ( filter != null && ( !onlyApplyForLoadByKey || filter.isAppliedToLoadByKey() ) ) { return true; } } return false; } public static void applyBaseRestrictions( Consumer<Predicate> predicateConsumer, Restrictable restrictable, TableGroup rootTableGroup, boolean useIdentificationVariable, LoadQueryInfluencers loadQueryInfluencers, SqlAstCreationState astCreationState) { restrictable.applyBaseRestrictions( predicateConsumer, rootTableGroup, useIdentificationVariable, loadQueryInfluencers.getEnabledFilters(), 
astCreationState.applyOnlyLoadByKeyFilters(), null, astCreationState ); } public void applyEnabledFilters( Consumer<Predicate> predicateConsumer, FilterAliasGenerator aliasGenerator, Map<String, Filter> enabledFilters, boolean onlyApplyLoadByKeyFilters, TableGroup tableGroup, SqlAstCreationState creationState) { final var predicate = generateFilterPredicate( aliasGenerator, enabledFilters, onlyApplyLoadByKeyFilters, tableGroup, creationState ); if ( predicate != null ) { predicateConsumer.accept( predicate ); } } private FilterPredicate generateFilterPredicate( FilterAliasGenerator aliasGenerator, Map<String, Filter> enabledFilters, boolean onlyApplyLoadByKeyFilters, TableGroup tableGroup, SqlAstCreationState creationState) { final var filterPredicate = new FilterPredicate(); for ( int i = 0, max = filterNames.length; i < max; i++ ) { final var enabledFilter = enabledFilters.get( filterNames[i] ); if ( enabledFilter != null && ( !onlyApplyLoadByKeyFilters || enabledFilter.isAppliedToLoadByKey() ) ) { filterPredicate.applyFragment( render( aliasGenerator, i, tableGroup, creationState ), enabledFilter, parameterNames[i] ); } } return filterPredicate.isEmpty() ? 
null : filterPredicate; } public String render(FilterAliasGenerator aliasGenerator, Map<String, Filter> enabledFilters) { final var buffer = new StringBuilder(); render( buffer, aliasGenerator, enabledFilters ); return buffer.toString(); } public void render(StringBuilder buffer, FilterAliasGenerator aliasGenerator, Map<String, Filter> enabledFilters) { if ( isNotEmpty( filterNames ) ) { for ( int i = 0, max = filterNames.length; i < max; i++ ) { if ( enabledFilters.containsKey( filterNames[i] ) && isNotEmpty( filterConditions[i] ) ) { if ( !buffer.isEmpty() ) { buffer.append( " and " ); } buffer.append( render( aliasGenerator, i, null, null ) ); } } } } private String render( FilterAliasGenerator aliasGenerator, int filterIndex, TableGroup tableGroup, SqlAstCreationState creationState) { final String condition = filterConditions[filterIndex]; if ( aliasGenerator == null ) { return replace( condition, MARKER + ".", ""); } else { final var aliasTableMap = filterAliasTableMaps[filterIndex]; if ( filterAutoAliasFlags[filterIndex] ) { final String tableName = aliasTableMap.get( null ); return replaceMarker( tableGroup, creationState, condition, aliasGenerator.getAlias( tableName ), tableName( tableGroup, tableName ) ); } else if ( isTableFromPersistentClass( aliasTableMap ) ) { final String tableName = aliasTableMap.get( null ); return replaceAlias( tableGroup, creationState, condition, "{alias}", aliasGenerator.getAlias( tableName ), tableName( tableGroup, tableName ) ); } else { String newCondition = condition; for ( var entry : aliasTableMap.entrySet() ) { final String tableName = entry.getValue(); newCondition = replaceAlias( tableGroup, creationState, newCondition, "{" + entry.getKey() + "}", aliasGenerator.getAlias( tableName ), tableName ); } return newCondition; } } } private String replaceMarker( TableGroup tableGroup, SqlAstCreationState creationState, String condition, String alias, String tableName) { final String newCondition = replace( condition, MARKER, 
alias ); if ( creationState != null && tableToEntityName != null && !newCondition.equals(condition) ) { registerEntityNameUsage( tableGroup, creationState, tableName ); } return newCondition; } private String replaceAlias( TableGroup tableGroup, SqlAstCreationState creationState, String condition, String placeholder, String alias, String tableName) { final String newCondition = replace( condition, placeholder, alias ); if ( creationState != null && !newCondition.equals(condition) ) { registerEntityNameUsage( tableGroup, creationState, tableName ); } return newCondition; } private void registerEntityNameUsage(TableGroup tableGroup, SqlAstCreationState creationState, String tableName) { creationState.registerEntityNameUsage( tableGroup, EntityNameUse.EXPRESSION, tableToEntityName.get( tableName ) ); } private static String tableName(TableGroup tableGroup, String tableName) { return tableName == null ? tableGroup.getPrimaryTableReference().getTableId() : tableName; } }
FilterHelper
java
apache__camel
components/camel-spring-parent/camel-spring-rabbitmq/src/test/java/org/apache/camel/component/springrabbit/integration/RabbitMQBasicIT.java
{ "start": 1233, "end": 3378 }
class ____ extends RabbitMQITSupport { String foo; String bar; @EndpointInject("mock:result") private MockEndpoint mock; @Override protected RouteBuilder createRouteBuilder() throws Exception { ConnectionProperties connectionProperties = service.connectionProperties(); foo = String.format("spring-rabbitmq:%s:%d/foo?username=%s&password=%s", connectionProperties.hostname(), connectionProperties.port(), connectionProperties.username(), connectionProperties.password()); bar = String.format("spring-rabbitmq:%s:%d/bar?username=%s&password=%s", connectionProperties.hostname(), connectionProperties.port(), connectionProperties.username(), connectionProperties.password()); return new RouteBuilder() { @Override public void configure() throws Exception { from(foo).log("FOO received: ${body}").to(bar); from(bar).log("BAR received: ${body}").to(mock).transform().simple("Bye ${body}"); } }; } @Test public void sentBasicInOnly() throws Exception { mock.expectedBodiesReceived("World"); log.info("Sending to FOO"); template.sendBody(foo, "World"); log.info("Sending to FOO done"); mock.assertIsSatisfied(); } @Test public void sentBasicInOut() throws Exception { mock.expectedBodiesReceived("World"); log.info("Sending to FOO"); String out = template.requestBody(foo, "World", String.class); assertEquals("Bye World", out); log.info("Sending to FOO done"); mock.assertIsSatisfied(); } @Test public void sentBasicInOutTwo() throws Exception { mock.expectedBodiesReceived("World", "Camel"); log.info("Sending to FOO"); String out = template.requestBody(foo, "World", String.class); assertEquals("Bye World", out); out = template.requestBody(foo, "Camel", String.class); assertEquals("Bye Camel", out); log.info("Sending to FOO done"); mock.assertIsSatisfied(); } }
RabbitMQBasicIT
java
apache__camel
components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsNotIncludeAllJMSXPropertiesTest.java
{ "start": 1493, "end": 3607 }
class ____ extends AbstractJMSTest { @Order(2) @RegisterExtension public static CamelContextExtension camelContextExtension = new DefaultCamelContextExtension(); protected CamelContext context; protected ProducerTemplate template; protected ConsumerTemplate consumer; @Test public void testNotIncludeAll() throws Exception { getMockEndpoint("mock:result").expectedBodiesReceived("Hello World"); getMockEndpoint("mock:result").expectedHeaderReceived("foo", "bar"); getMockEndpoint("mock:result").expectedHeaderReceived("JMSXUserID", null); getMockEndpoint("mock:result").expectedHeaderReceived("JMSXAppID", null); Map<String, Object> headers = new HashMap<>(); headers.put("foo", "bar"); headers.put("JMSXUserID", "Donald"); headers.put("JMSXAppID", "MyApp"); template.sendBodyAndHeaders("activemq:queue:JmsNotIncludeAllJMSXPropertiesTest", "Hello World", headers); MockEndpoint.assertIsSatisfied(context); } @Override protected String getComponentName() { return "activemq"; } @Override protected JmsComponent setupComponent(CamelContext camelContext, ArtemisService service, String componentName) { final JmsComponent jms = super.setupComponent(camelContext, service, componentName); jms.getConfiguration().setIncludeAllJMSXProperties(false); return jms; } @Override protected RouteBuilder createRouteBuilder() { return new RouteBuilder() { public void configure() { from("activemq:queue:JmsNotIncludeAllJMSXPropertiesTest") .to("mock:result"); } }; } @Override public CamelContextExtension getCamelContextExtension() { return camelContextExtension; } @BeforeEach void setUpRequirements() { context = camelContextExtension.getContext(); template = camelContextExtension.getProducerTemplate(); consumer = camelContextExtension.getConsumerTemplate(); } }
JmsNotIncludeAllJMSXPropertiesTest
java
elastic__elasticsearch
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/embeddings/AzureAiStudioEmbeddingsRequestTaskSettingsTests.java
{ "start": 665, "end": 1897 }
class ____ extends ESTestCase { public void testFromMap_ReturnsEmptySettings_WhenTheMapIsEmpty() { var settings = AzureAiStudioEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of())); assertThat(settings, is(AzureAiStudioEmbeddingsRequestTaskSettings.EMPTY_SETTINGS)); } public void testFromMap_ReturnsEmptySettings_WhenTheMapDoesNotContainTheFields() { var settings = AzureAiStudioEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of("key", "model"))); assertNull(settings.user()); } public void testFromMap_ReturnsUser() { var settings = AzureAiStudioEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of(AzureAiStudioConstants.USER_FIELD, "user"))); assertThat(settings.user(), is("user")); } public void testFromMap_WhenUserIsEmpty_ThrowsValidationException() { var exception = expectThrows( ValidationException.class, () -> AzureAiStudioEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of(AzureAiStudioConstants.USER_FIELD, ""))) ); assertThat(exception.getMessage(), containsString("[user] must be a non-empty string")); } }
AzureAiStudioEmbeddingsRequestTaskSettingsTests
java
quarkusio__quarkus
extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/executionmodel/BlockingAnnotationTest.java
{ "start": 652, "end": 1277 }
class ____ { @RegisterExtension public static final QuarkusUnitTest test = new QuarkusUnitTest() .withApplicationRoot(root -> { root.addClasses(Endpoint.class, WSClient.class); }); @Inject Vertx vertx; @TestHTTPResource("endpoint") URI endUri; @Test void testEndoint() { try (WSClient client = new WSClient(vertx)) { client.connect(endUri); assertEquals("evenloop:false,worker:true", client.sendAndAwaitReply("foo").toString()); } } @WebSocket(path = "/endpoint") public static
BlockingAnnotationTest
java
quarkusio__quarkus
independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/InvokerBuilder.java
{ "start": 2752, "end": 3958 }
class ____ implements Invoker&lt;MyService, String&gt; { * String invoke(MyService ignored, Object[] arguments) { * MyService instance = CDI.current().select(MyService.class).get(); * String argument = (String) arguments[0]; * String transformedArgument = argument.toUpperCase(); * String result = instance.hello(transformedArgument); * String transformedResult = Transformations.repeatTwice(result); * return transformedResult; * } * } * </pre> * * The caller of this invoker may pass {@code null} as the target instance, because * the invoker will lookup the target instance on its own. Therefore, calling * {@code invoker.invoke(null, new Object[] {"world"})} will return * {@code "Hello WORLD! Hello WORLD!"}. * * <h2>General requirements</h2> * * To refer to a transformer or a wrapper, all methods in this builder accept: * 1. the {@code Class} that declares the method, and 2. the {@code String} name * of the method. * <p> * Transformers may be {@code static}, in which case they must be declared directly * on the given class, or they may be instance methods, in which case they may be declared * on the given
TheInvoker
java
elastic__elasticsearch
server/src/test/java/org/elasticsearch/search/retriever/RescorerRetrieverBuilderParsingTests.java
{ "start": 1137, "end": 3032 }
class ____ extends AbstractXContentTestCase<RescorerRetrieverBuilder> { private static List<NamedXContentRegistry.Entry> xContentRegistryEntries; @BeforeClass public static void init() { xContentRegistryEntries = new SearchModule(Settings.EMPTY, emptyList()).getNamedXContents(); } @AfterClass public static void afterClass() throws Exception { xContentRegistryEntries = null; } @Override protected RescorerRetrieverBuilder createTestInstance() { int num = randomIntBetween(1, 3); List<RescorerBuilder<?>> rescorers = new ArrayList<>(); for (int i = 0; i < num; i++) { rescorers.add(QueryRescorerBuilderTests.randomRescoreBuilder()); } return new RescorerRetrieverBuilder(TestRetrieverBuilder.createRandomTestRetrieverBuilder(), rescorers); } @Override protected RescorerRetrieverBuilder doParseInstance(XContentParser parser) throws IOException { return (RescorerRetrieverBuilder) RetrieverBuilder.parseTopLevelRetrieverBuilder( parser, new RetrieverParserContext(new SearchUsage(), n -> true) ); } @Override protected boolean supportsUnknownFields() { return false; } @Override protected NamedXContentRegistry xContentRegistry() { List<NamedXContentRegistry.Entry> entries = new ArrayList<>(xContentRegistryEntries); entries.add( new NamedXContentRegistry.Entry( RetrieverBuilder.class, TestRetrieverBuilder.TEST_SPEC.getName(), (p, c) -> TestRetrieverBuilder.TEST_SPEC.getParser().fromXContent(p, (RetrieverParserContext) c), TestRetrieverBuilder.TEST_SPEC.getName().getForRestApiVersion() ) ); return new NamedXContentRegistry(entries); } }
RescorerRetrieverBuilderParsingTests
java
dropwizard__dropwizard
dropwizard-testing/src/test/java/io/dropwizard/testing/app/DropwizardTestApplication.java
{ "start": 2189, "end": 2525 }
class ____ extends PostBodyTask { public EchoTask() { super("echo"); } @Override public void execute(Map<String, List<String>> parameters, String body, PrintWriter output) throws Exception { output.print(body); output.flush(); } } public static
EchoTask
java
elastic__elasticsearch
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java
{ "start": 1084, "end": 3978 }
class ____ implements EvalOperator.ExpressionEvaluator { private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(AbsDoubleEvaluator.class); private final Source source; private final EvalOperator.ExpressionEvaluator fieldVal; private final DriverContext driverContext; private Warnings warnings; public AbsDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator fieldVal, DriverContext driverContext) { this.source = source; this.fieldVal = fieldVal; this.driverContext = driverContext; } @Override public Block eval(Page page) { try (DoubleBlock fieldValBlock = (DoubleBlock) fieldVal.eval(page)) { DoubleVector fieldValVector = fieldValBlock.asVector(); if (fieldValVector == null) { return eval(page.getPositionCount(), fieldValBlock); } return eval(page.getPositionCount(), fieldValVector).asBlock(); } } @Override public long baseRamBytesUsed() { long baseRamBytesUsed = BASE_RAM_BYTES_USED; baseRamBytesUsed += fieldVal.baseRamBytesUsed(); return baseRamBytesUsed; } public DoubleBlock eval(int positionCount, DoubleBlock fieldValBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { switch (fieldValBlock.getValueCount(p)) { case 0: result.appendNull(); continue position; case 1: break; default: warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); result.appendNull(); continue position; } double fieldVal = fieldValBlock.getDouble(fieldValBlock.getFirstValueIndex(p)); result.appendDouble(Abs.process(fieldVal)); } return result.build(); } } public DoubleVector eval(int positionCount, DoubleVector fieldValVector) { try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { double fieldVal = fieldValVector.getDouble(p); result.appendDouble(p, Abs.process(fieldVal)); } 
return result.build(); } } @Override public String toString() { return "AbsDoubleEvaluator[" + "fieldVal=" + fieldVal + "]"; } @Override public void close() { Releasables.closeExpectNoException(fieldVal); } private Warnings warnings() { if (warnings == null) { this.warnings = Warnings.createWarnings( driverContext.warningsMode(), source.source().getLineNumber(), source.source().getColumnNumber(), source.text() ); } return warnings; } static
AbsDoubleEvaluator
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/MultipleInheritanceTest.java
{ "start": 3190, "end": 3304 }
class ____ implements Serializable { @Column(name = "OPTIONAL_ID1") private String id1; } } }
CarOptionalPK
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamMetadata.java
{ "start": 10719, "end": 13079 }
class ____ implements NamedDiff<Metadata.ProjectCustom> { private static final DiffableUtils.DiffableValueReader<String, DataStream> DS_DIFF_READER = new DiffableUtils.DiffableValueReader<>( DataStream::read, DataStream::readDiffFrom ); private static final DiffableUtils.DiffableValueReader<String, DataStreamAlias> ALIAS_DIFF_READER = new DiffableUtils.DiffableValueReader<>(DataStreamAlias::new, DataStreamAlias::readDiffFrom); final DiffableUtils.MapDiff<String, DataStream, ImmutableOpenMap<String, DataStream>> dataStreamDiff; final DiffableUtils.MapDiff<String, DataStreamAlias, ImmutableOpenMap<String, DataStreamAlias>> dataStreamAliasDiff; DataStreamMetadataDiff(DataStreamMetadata before, DataStreamMetadata after) { this.dataStreamDiff = DiffableUtils.diff(before.dataStreams, after.dataStreams, DiffableUtils.getStringKeySerializer()); this.dataStreamAliasDiff = DiffableUtils.diff( before.dataStreamAliases, after.dataStreamAliases, DiffableUtils.getStringKeySerializer() ); } DataStreamMetadataDiff(StreamInput in) throws IOException { this.dataStreamDiff = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), DS_DIFF_READER); this.dataStreamAliasDiff = DiffableUtils.readImmutableOpenMapDiff( in, DiffableUtils.getStringKeySerializer(), ALIAS_DIFF_READER ); } @Override public Metadata.ProjectCustom apply(Metadata.ProjectCustom part) { return new DataStreamMetadata( dataStreamDiff.apply(((DataStreamMetadata) part).dataStreams), dataStreamAliasDiff.apply(((DataStreamMetadata) part).dataStreamAliases) ); } @Override public void writeTo(StreamOutput out) throws IOException { dataStreamDiff.writeTo(out); dataStreamAliasDiff.writeTo(out); } @Override public String getWriteableName() { return TYPE; } @Override public TransportVersion getMinimalSupportedVersion() { return TransportVersion.zero(); } } }
DataStreamMetadataDiff
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/pc/FilterJoinTableTest.java
{ "start": 999, "end": 2457 }
class ____ { @Test public void testLifecycle(EntityManagerFactoryScope scope) { scope.inTransaction( entityManager -> { //tag::pc-filter-join-table-persistence-example[] Client client = new Client() .setId(1L) .setName("John Doe"); client.addAccount( new Account() .setId(1L) .setType(AccountType.CREDIT) .setAmount(5000d) .setRate(1.25 / 100) ); client.addAccount( new Account() .setId(2L) .setType(AccountType.DEBIT) .setAmount(0d) .setRate(1.05 / 100) ); client.addAccount( new Account() .setType(AccountType.DEBIT) .setId(3L) .setAmount(250d) .setRate(1.05 / 100) ); entityManager.persist(client); //end::pc-filter-join-table-persistence-example[] }); scope.inTransaction( entityManager -> { //tag::pc-no-filter-join-table-collection-query-example[] Client client = entityManager.find(Client.class, 1L); assertThat(client.getAccounts()).hasSize( 3 ); //end::pc-no-filter-join-table-collection-query-example[] }); scope.inTransaction( entityManager -> { //tag::pc-filter-join-table-collection-query-example[] Client client = entityManager.find(Client.class, 1L); entityManager .unwrap(Session.class) .enableFilter("firstAccounts") .setParameter("maxOrderId", 1); assertThat(client.getAccounts()).hasSize( 2 ); //end::pc-filter-join-table-collection-query-example[] }); } public
FilterJoinTableTest
java
elastic__elasticsearch
build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/LicenseAnalyzer.java
{ "start": 677, "end": 10702 }
class ____ { /* * Order here matters. License files can often contain multiple licenses for which the particular piece of software may by used under. * We should order these in order of most permissive to least permissive such that we identify the license as the most permissive for * purposes of redistribution. Search order is as defined below so the license will be identified as the first pattern to match. */ private static final LicenseMatcher[] matchers = new LicenseMatcher[] { new LicenseMatcher("Apache-2.0", true, false, Pattern.compile("Apache.*License.*[vV]ersion.*2\\.0", Pattern.DOTALL)), new LicenseMatcher("BSD-2-Clause", true, false, Pattern.compile((""" Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1\\. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer\\. 2\\. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution\\. THIS SOFTWARE IS PROVIDED BY .+ (``|''|")AS IS(''|") AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED\\. 
IN NO EVENT SHALL .+ BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES \\(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION\\) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT \\(INCLUDING NEGLIGENCE OR OTHERWISE\\) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE\\.""").replaceAll("\\s+", "\\\\s*"), Pattern.DOTALL)), new LicenseMatcher("BSD-3-Clause", true, false, Pattern.compile((""" Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: (1\\.)? Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer\\. (2\\.)? Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution\\. ((3\\.)? The name of .+ may not be used to endorse or promote products derived from this software without specific prior written permission\\.| (3\\.)? Neither the name of .+ nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission\\.) THIS SOFTWARE IS PROVIDED BY .+ (``|''|")AS IS(''|") AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED\\. 
IN NO EVENT SHALL .+ BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES \\(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION\\) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT \\(INCLUDING NEGLIGENCE OR OTHERWISE\\) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE\\. """).replaceAll("\\s+", "\\\\s*"), Pattern.DOTALL)), new LicenseMatcher( "CDDL-1.0", true, false, Pattern.compile("COMMON DEVELOPMENT AND DISTRIBUTION LICENSE.*Version 1.0", Pattern.DOTALL) ), new LicenseMatcher( "CDDL-1.1", true, false, Pattern.compile("COMMON DEVELOPMENT AND DISTRIBUTION LICENSE.*Version 1.1", Pattern.DOTALL) ), new LicenseMatcher("ICU", true, false, Pattern.compile("ICU License - ICU 1.8.1 and later", Pattern.DOTALL)), new LicenseMatcher("MIT", true, false, Pattern.compile((""" Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files \\(the "Software"\\), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software\\. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT\\. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE\\.? """).replaceAll("\\s+", "\\\\s*"), Pattern.DOTALL)), new LicenseMatcher( "MIT-0", true, false, Pattern.compile( (""" MIT No Attribution Copyright .+ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files \\(the "Software"\\), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT\\. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
""") .replaceAll("\\s+", "\\\\s*"), Pattern.DOTALL ) ), new LicenseMatcher("MPL-1.1", true, false, Pattern.compile("Mozilla Public License.*Version 1.1", Pattern.DOTALL)), new LicenseMatcher("MPL-2.0", true, false, Pattern.compile("Mozilla\\s*Public\\s*License\\s*Version\\s*2\\.0", Pattern.DOTALL)), new LicenseMatcher("XZ", false, false, Pattern.compile("Licensing of XZ for Java", Pattern.DOTALL)), new LicenseMatcher("EPL-2.0", true, false, Pattern.compile("Eclipse Public License - v 2.0", Pattern.DOTALL)), new LicenseMatcher("EDL-1.0", true, false, Pattern.compile("Eclipse Distribution License - v 1.0", Pattern.DOTALL)), new LicenseMatcher("LGPL-2.1", true, true, Pattern.compile("GNU LESSER GENERAL PUBLIC LICENSE.*Version 2.1", Pattern.DOTALL)), new LicenseMatcher("LGPL-3.0", true, true, Pattern.compile("GNU LESSER GENERAL PUBLIC LICENSE.*Version 3", Pattern.DOTALL)), new LicenseMatcher("GeoLite", false, false, Pattern.compile((""" The Elastic GeoIP Database Service uses the GeoLite2 Data created and licensed by MaxMind, which is governed by MaxMind’s GeoLite2 End User License Agreement, available at https://www.maxmind.com/en/geolite2/eula. 
""").replaceAll("\\s+", "\\\\s*"), Pattern.DOTALL)), new LicenseMatcher( "GeoIp-Database-Service", false, false, Pattern.compile( ("By using the GeoIP Database Service, you agree to the Elastic GeoIP Database Service Agreement,\n" + "available at www.elastic.co/elastic-geoip-database-service-terms.").replaceAll("\\s+", "\\\\s*"), Pattern.DOTALL ) ) }; public static LicenseInfo licenseType(File licenseFile) { for (LicenseMatcher matcher : matchers) { boolean matches = matcher.matches(licenseFile); if (matches) { return new LicenseInfo(matcher.identifier(), matcher.spdxLicense, matcher.sourceRedistributionRequired); } } throw new IllegalStateException("Unknown license for license file: " + licenseFile); } public record LicenseInfo(String identifier, boolean spdxLicense, boolean sourceRedistributionRequired) {} private record LicenseMatcher(String identifier, boolean spdxLicense, boolean sourceRedistributionRequired, Pattern pattern) { public boolean matches(File licenseFile) { try { String content = Files.readString(licenseFile.toPath()).replaceAll("\\*", " "); return pattern.matcher(content).find(); } catch (IOException e) { throw new UncheckedIOException(e); } } } }
LicenseAnalyzer
java
spring-projects__spring-boot
module/spring-boot-health/src/main/java/org/springframework/boot/health/actuate/endpoint/SimpleStatusAggregator.java
{ "start": 3340, "end": 3719 }
class ____ implements Comparator<Status> { @Override public int compare(Status s1, Status s2) { List<String> order = SimpleStatusAggregator.this.order; int i1 = order.indexOf(getUniformCode(s1.getCode())); int i2 = order.indexOf(getUniformCode(s2.getCode())); return (i1 < i2) ? -1 : (i1 != i2) ? 1 : s1.getCode().compareTo(s2.getCode()); } } }
StatusComparator
java
grpc__grpc-java
api/src/test/java/io/grpc/AttributesTest.java
{ "start": 1015, "end": 2271 }
class ____ { private static final Attributes.Key<String> YOLO_KEY = Attributes.Key.create("yolo"); @Test public void buildAttributes() { Attributes attrs = Attributes.newBuilder().set(YOLO_KEY, "To be, or not to be?").build(); assertSame("To be, or not to be?", attrs.get(YOLO_KEY)); assertThat(attrs.keysForTest()).hasSize(1); } @Test public void duplicates() { Attributes attrs = Attributes.newBuilder() .set(YOLO_KEY, "To be?") .set(YOLO_KEY, "Or not to be?") .set(Attributes.Key.create("yolo"), "I'm not a duplicate") .build(); assertThat(attrs.get(YOLO_KEY)).isEqualTo("Or not to be?"); assertThat(attrs.keysForTest()).hasSize(2); } @Test public void toBuilder() { Attributes attrs = Attributes.newBuilder() .set(YOLO_KEY, "To be?") .build() .toBuilder() .set(YOLO_KEY, "Or not to be?") .set(Attributes.Key.create("yolo"), "I'm not a duplicate") .build(); assertThat(attrs.get(YOLO_KEY)).isEqualTo("Or not to be?"); assertThat(attrs.keysForTest()).hasSize(2); } @Test public void empty() { assertThat(Attributes.EMPTY.keysForTest()).isEmpty(); } @Test public void valueEquality() {
AttributesTest
java
grpc__grpc-java
binder/src/main/java/io/grpc/binder/internal/TransactionUtils.java
{ "start": 1007, "end": 4411 }
class ____ { /** Set when the transaction contains rpc prefix data. */ static final int FLAG_PREFIX = 0x1; /** Set when the transaction contains some message data. */ static final int FLAG_MESSAGE_DATA = 0x2; /** Set when the transaction contains rpc suffix data. */ static final int FLAG_SUFFIX = 0x4; /** Set when the transaction is an out-of-band close event. */ static final int FLAG_OUT_OF_BAND_CLOSE = 0x8; /** * When a transaction contains client prefix data, this will be set if the rpc being made is * expected to return a single message. (I.e the method type is either {@link MethodType#UNARY}, * or {@link MethodType#CLIENT_STREAMING}). */ static final int FLAG_EXPECT_SINGLE_MESSAGE = 0x10; /** Set when the included status data includes a description string. */ static final int FLAG_STATUS_DESCRIPTION = 0x20; /** When a transaction contains message data, this will be set if the message is a parcelable. */ static final int FLAG_MESSAGE_DATA_IS_PARCELABLE = 0x40; /** * When a transaction contains message data, this will be set if the message is only partial, and * further transactions are required. */ static final int FLAG_MESSAGE_DATA_IS_PARTIAL = 0x80; static final int STATUS_CODE_SHIFT = 16; static final int STATUS_CODE_MASK = 0xff0000; /** The maximum string length for a status description. 
*/ private static final int MAX_STATUS_DESCRIPTION_LENGTH = 1000; private TransactionUtils() {} static boolean hasFlag(int flags, int flag) { return (flags & flag) != 0; } @Nullable private static String getTruncatedDescription(Status status) { String desc = status.getDescription(); if (desc != null && desc.length() > MAX_STATUS_DESCRIPTION_LENGTH) { desc = desc.substring(0, MAX_STATUS_DESCRIPTION_LENGTH); } return desc; } static Status readStatus(int flags, Parcel parcel) { Status status = Status.fromCodeValue((flags & STATUS_CODE_MASK) >> STATUS_CODE_SHIFT); if ((flags & FLAG_STATUS_DESCRIPTION) != 0) { status = status.withDescription(parcel.readString()); } return status; } static int writeStatus(Parcel parcel, Status status) { int flags = status.getCode().value() << STATUS_CODE_SHIFT; String desc = getTruncatedDescription(status); if (desc != null) { flags |= FLAG_STATUS_DESCRIPTION; parcel.writeString(desc); } return flags; } static void fillInFlags(Parcel parcel, int flags) { int pos = parcel.dataPosition(); parcel.setDataPosition(0); parcel.writeInt(flags); parcel.setDataPosition(pos); } /** * Decorates the given {@link TransactionHandler} with a wrapper that only forwards transactions * from the given `allowedCallingUid`. */ static TransactionHandler newCallerFilteringHandler( int allowedCallingUid, TransactionHandler wrapped) { final Logger logger = Logger.getLogger(TransactionUtils.class.getName()); return new TransactionHandler() { @Override public boolean handleTransaction(int code, Parcel data) { int callingUid = Binder.getCallingUid(); if (callingUid != allowedCallingUid) { logger.log(Level.WARNING, "dropped txn from " + callingUid + " !=" + allowedCallingUid); return false; } return wrapped.handleTransaction(code, data); } }; } }
TransactionUtils
java
hibernate__hibernate-orm
hibernate-envers/src/main/java/org/hibernate/envers/configuration/internal/metadata/reader/AuditedPropertiesReader.java
{ "start": 36476, "end": 36594 }
class ____'t annotated, check property if ( classAudited == null ) { if ( propertyAudited == null ) { // both
isn
java
assertj__assertj-core
assertj-core/src/main/java/org/assertj/core/api/AbstractCharacterAssert.java
{ "start": 871, "end": 1374 }
class ____ all implementations of assertions for {@link Character}s. * * @param <SELF> the "self" type of this assertion class. Please read &quot;<a href="http://bit.ly/1IZIRcY" * target="_blank">Emulating 'self types' using Java Generics to simplify fluent API implementation</a>&quot; * for more details. * * @author Yvonne Wang * @author David DIDIER * @author Ansgar Konermann * @author Alex Ruiz * @author Joel Costigliola * @author Mikhail Mazursky */ public abstract
for
java
apache__camel
components/camel-xmlsecurity/src/main/java/org/apache/camel/component/xmlsecurity/api/XmlSignatureChecker.java
{ "start": 2122, "end": 3198 }
interface ____ { /** Signed info instance. */ SignedInfo getSignedInfo(); /** Signature value instance. */ SignatureValue getSignatureValue(); /** XML objects list. */ List<? extends XMLObject> getObjects(); /** Key info. */ KeyInfo getKeyInfo(); /** Message body containing the XML signature as DOM. */ Document getMessageBodyDocument(); /** Message. */ Message getMessage(); /** * Returns true if a XML schema validation was executed during the parsing of the XML document. * */ boolean isXmlSchemaValidationExecuted(); /** * Returns the total count of XML signatures contained in the document. * */ int getTotalCountOfSignatures(); /** * Returns the current count of XML signatures starting from 1. * */ int getCurrentCountOfSignatures(); /** Current signature element. */ Element getCurrentSignatureElement(); } }
Input
java
mockito__mockito
mockito-core/src/main/java/org/mockito/internal/MockedConstructionImpl.java
{ "start": 502, "end": 1613 }
class ____<T> implements MockedConstruction<T> { private final MockMaker.ConstructionMockControl<T> control; private boolean closed; private final Location location = LocationFactory.create(); protected MockedConstructionImpl(MockMaker.ConstructionMockControl<T> control) { this.control = control; } @Override public List<T> constructed() { return Collections.unmodifiableList(control.getMocks()); } @Override public boolean isClosed() { return closed; } @Override public void close() { assertNotClosed(); closed = true; control.disable(); } @Override public void closeOnDemand() { if (!closed) { close(); } } private void assertNotClosed() { if (closed) { throw new MockitoException( join( "The static mock created at", location.toString(), "is already resolved and cannot longer be used")); } } }
MockedConstructionImpl
java
apache__flink
flink-streaming-java/src/main/java/org/apache/flink/streaming/runtime/operators/CheckpointCommitter.java
{ "start": 2226, "end": 4365 }
class ____ implements Serializable { protected static final Logger LOG = LoggerFactory.getLogger(CheckpointCommitter.class); protected String jobId; protected String operatorId; /** * Internally used to set the job ID after instantiation. * * @param id * @throws Exception */ public void setJobId(String id) throws Exception { this.jobId = id; } /** * Internally used to set the operator ID after instantiation. * * @param id * @throws Exception */ public void setOperatorId(String id) throws Exception { this.operatorId = id; } /** * Opens/connects to the resource, and possibly creates it beforehand. * * @throws Exception */ public abstract void open() throws Exception; /** * Closes the resource/connection to it. The resource should generally still exist after this * call. * * @throws Exception */ public abstract void close() throws Exception; /** * Creates/opens/connects to the resource that is used to store information. Called once * directly after instantiation. * * @throws Exception */ public abstract void createResource() throws Exception; /** * Mark the given checkpoint as completed in the resource. * * @param subtaskIdx the index of the subtask responsible for committing the checkpoint. * @param checkpointID the id of the checkpoint to be committed. * @throws Exception */ public abstract void commitCheckpoint(int subtaskIdx, long checkpointID) throws Exception; /** * Checked the resource whether the given checkpoint was committed completely. * * @param subtaskIdx the index of the subtask responsible for committing the checkpoint. * @param checkpointID the id of the checkpoint we are interested in. * @return true if the checkpoint was committed completely, false otherwise * @throws Exception */ public abstract boolean isCheckpointCommitted(int subtaskIdx, long checkpointID) throws Exception; }
CheckpointCommitter
java
apache__camel
components/camel-spring-parent/camel-spring-rabbitmq/src/main/java/org/apache/camel/component/springrabbit/EndpointMessageListener.java
{ "start": 1786, "end": 7819 }
class ____ implements ChannelAwareMessageListener { private static final Logger LOG = LoggerFactory.getLogger(EndpointMessageListener.class); private final SpringRabbitMQConsumer consumer; private final SpringRabbitMQEndpoint endpoint; private final AsyncProcessor processor; private RabbitTemplate template; private boolean disableReplyTo; private boolean async; private final Lock lock = new ReentrantLock(); public EndpointMessageListener(SpringRabbitMQConsumer consumer, SpringRabbitMQEndpoint endpoint, Processor processor) { this.consumer = consumer; this.endpoint = endpoint; this.processor = AsyncProcessorConverterHelper.convert(processor); } public boolean isAsync() { return async; } /** * Sets whether asynchronous routing is enabled. * <p/> * By default this is <tt>false</tt>. If configured as <tt>true</tt> then this listener will process the * {@link org.apache.camel.Exchange} asynchronous. */ public void setAsync(boolean async) { this.async = async; } public boolean isDisableReplyTo() { return disableReplyTo; } public void setDisableReplyTo(boolean disableReplyTo) { this.disableReplyTo = disableReplyTo; } public RabbitTemplate getTemplate() { lock.lock(); try { if (template == null) { template = endpoint.createInOnlyTemplate(); } return template; } finally { lock.unlock(); } } public void setTemplate(RabbitTemplate template) { this.template = template; } @Override public void onMessage(Message message, Channel channel) throws Exception { LOG.trace("onMessage START"); LOG.debug("{} consumer received RabbitMQ message: {}", endpoint, message); RuntimeCamelException rce; try { final Address replyDestination = message.getMessageProperties() != null ? 
message.getMessageProperties().getReplyToAddress() : null; final boolean sendReply = !isDisableReplyTo() && replyDestination != null; final Exchange exchange = createExchange(message, channel, replyDestination); // process the exchange either asynchronously or synchronous LOG.trace("onMessage.process START"); AsyncCallback callback = new EndpointMessageListenerAsyncCallback(message, exchange, endpoint, sendReply, replyDestination); // async is by default false, which mean we by default will process the exchange synchronously // to keep backwards compatible, as well ensure this consumer will pickup messages in order // (eg to not consume the next message before the previous has been fully processed) // but if end user explicit configure consumerAsync=true, then we can process the message // asynchronously (unless endpoint has been configured synchronous, or we use transaction) boolean forceSync = endpoint.isSynchronous(); if (forceSync || !isAsync()) { // must process synchronous if transacted or configured to do so if (LOG.isTraceEnabled()) { LOG.trace("Processing exchange {} synchronously", exchange.getExchangeId()); } try { processor.process(exchange); } catch (Exception e) { exchange.setException(e); } finally { callback.done(true); } } else { // process asynchronous using the async routing engine if (LOG.isTraceEnabled()) { LOG.trace("Processing exchange {} asynchronously", exchange.getExchangeId()); } boolean sync = processor.process(exchange, callback); if (!sync) { // will be done async so return now return; } } // if we failed processed the exchange from the async callback task, then grab the exception rce = exchange.getException(RuntimeCamelException.class); // release back when synchronous mode consumer.releaseExchange(exchange, false); } catch (Exception e) { rce = wrapRuntimeCamelException(e); } // an exception occurred so rethrow to trigger rollback on listener // the listener will use the error handler to handle the uncaught exception if (rce != null) 
{ LOG.trace("onMessage END throwing exception: {}", rce.getMessage()); // Spring message listener container will handle uncaught exceptions throw rce; } LOG.trace("onMessage END"); } protected Exchange createExchange(Message message, Channel channel, Object replyDestination) { Exchange exchange = consumer.createExchange(false); exchange.setProperty(SpringRabbitMQConstants.CHANNEL, channel); Object body = endpoint.getMessageConverter().fromMessage(message); exchange.getMessage().setBody(body); Map<String, Object> headers = endpoint.getMessagePropertiesConverter().fromMessageProperties(message.getMessageProperties(), exchange); if (!headers.isEmpty()) { exchange.getMessage().setHeaders(headers); } // lets set to an InOut if we have some kind of reply-to destination if (replyDestination != null && !disableReplyTo) { // only change pattern if not already out capable if (!exchange.getPattern().isOutCapable()) { exchange.setPattern(ExchangePattern.InOut); } } return exchange; } /** * Callback task that is performed when the exchange has been processed */ private final
EndpointMessageListener
java
apache__hadoop
hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/server/dns/RecordCreatorFactory.java
{ "start": 1279, "end": 2357 }
class ____ { private static long ttl; /** * Private constructor. */ private RecordCreatorFactory() { } /** * Returns the DNS record creator for the provided type. * * @param type the DNS record type. * @return the record creator. */ static RecordCreator getRecordCreator(int type) { switch (type) { case A: return new ARecordCreator(); case CNAME: return new CNAMERecordCreator(); case TXT: return new TXTRecordCreator(); case AAAA: return new AAAARecordCreator(); case PTR: return new PTRRecordCreator(); case SRV: return new SRVRecordCreator(); default: throw new IllegalArgumentException("No type " + type); } } /** * Set the TTL value for the records created by the factory. * * @param ttl the ttl value, in seconds. */ public static void setTtl(long ttl) { RecordCreatorFactory.ttl = ttl; } /** * A DNS Record creator. * * @param <R> the record type * @param <T> the record's target type */ public
RecordCreatorFactory
java
hibernate__hibernate-orm
hibernate-testing/src/main/java/org/hibernate/testing/orm/junit/DialectFeatureChecks.java
{ "start": 34996, "end": 35174 }
class ____ implements DialectFeatureCheck { public boolean apply(Dialect dialect) { return definesFunction( dialect, "array_length" ); } } public static
SupportsArrayLength
java
elastic__elasticsearch
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdate.java
{ "start": 6362, "end": 7668 }
class ____ implements InferenceConfigUpdate.Builder<ZeroShotClassificationConfigUpdate.Builder, ZeroShotClassificationConfigUpdate> { private List<String> labels; private Boolean isMultiLabel; private String resultsField; private TokenizationUpdate tokenizationUpdate; @Override public ZeroShotClassificationConfigUpdate.Builder setResultsField(String resultsField) { this.resultsField = resultsField; return this; } public Builder setLabels(List<String> labels) { this.labels = labels; return this; } public Builder setMultiLabel(Boolean multiLabel) { isMultiLabel = multiLabel; return this; } public Builder setTokenizationUpdate(TokenizationUpdate tokenizationUpdate) { this.tokenizationUpdate = tokenizationUpdate; return this; } @Override public ZeroShotClassificationConfigUpdate build() { return new ZeroShotClassificationConfigUpdate(labels, isMultiLabel, resultsField, tokenizationUpdate); } } @Override public TransportVersion getMinimalSupportedVersion() { return TransportVersion.minimumCompatible(); } }
Builder
java
assertj__assertj-core
assertj-core/src/main/java/org/assertj/core/api/recursive/assertion/RecursiveAssertionConfiguration.java
{ "start": 28816, "end": 29597 }
class ____ { * Map&lt;String, String&gt; greetings = new HashMap&lt;&gt;(); * } * * Parent parent = new Parent(); * parent.greetings.put("english", "Hi"); * parent.greetings.put("french", "Salut"); * * assertThat(parent).usingRecursiveAssertion() * .allFieldsSatisfy(field -> myPredicate(field)); </code></pre> * * With this policy, <code>myPredicate(field)</code> is applied to the {@code greetings} field and also to * the keys and values of the {@code greetings} map: {@code "english", "Hi", "french"} and {@code "Salut"}. */ MAP_OBJECT_AND_ENTRIES } /** * Possible policies to use regarding optionals when recursively asserting over the fields of an object tree. */ public
Parent
java
elastic__elasticsearch
x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/QueryExecutionMetadataIT.java
{ "start": 858, "end": 5939 }
class ____ extends AbstractCrossClusterTestCase { @Override protected Map<String, Boolean> skipUnavailableForRemoteClusters() { return Map.of(REMOTE_CLUSTER_1, randomBoolean(), REMOTE_CLUSTER_2, randomBoolean()); } protected void assertClusterInfoSuccess(EsqlExecutionInfo.Cluster clusterInfo, int numShards, long overallTookMillis) { assertThat(clusterInfo.getIndexExpression(), equalTo("logs-*")); assertThat(clusterInfo.getTook().millis(), lessThanOrEqualTo(overallTookMillis)); super.assertClusterInfoSuccess(clusterInfo, numShards); } protected EsqlQueryResponse runQueryWithMetadata(String query, Boolean includeExecutionMetadata) { EsqlQueryRequest request = syncEsqlQueryRequest(query); request.pragmas(AbstractEsqlIntegTestCase.randomPragmas()); request.profile(randomInt(5) == 2); request.columnar(randomBoolean()); if (includeExecutionMetadata != null) { request.includeExecutionMetadata(includeExecutionMetadata); } return runQuery(request); } public void testLocal() throws Exception { testQuery(true, false, 45L); } public void testRemote() throws Exception { testQuery(false, true, 285L); } public void testLocalAndRemote() throws Exception { testQuery(true, true, 330L); } protected void testQuery(boolean local, boolean remote, long nRecords) throws Exception { if (local == false && remote == false) { throw new IllegalArgumentException("At least one of local or remote must be true"); } StringBuilder query = new StringBuilder("from "); if (local) { query.append("logs-*"); if (remote) { query.append(","); } } if (remote) { query.append("c*:logs-*"); } query.append(" | stats sum (v)"); Map<String, Object> testClusterInfo = setupTwoClusters(); int localNumShards = (Integer) testClusterInfo.get("local.num_shards"); int remoteNumShards = (Integer) testClusterInfo.get("remote1.num_shards"); boolean includeMetadata = randomBoolean(); try (EsqlQueryResponse resp = runQueryWithMetadata(query.toString(), includeMetadata)) { List<List<Object>> values = getValuesList(resp); 
assertThat(values, hasSize(1)); assertThat(values.get(0), equalTo(List.of(nRecords))); EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); assertNotNull(executionInfo); assertThat(executionInfo.isCrossClusterSearch(), is(remote)); long overallTookMillis = executionInfo.overallTook().millis(); assertThat(overallTookMillis, greaterThanOrEqualTo(0L)); assertThat( executionInfo.includeExecutionMetadata(), equalTo( includeMetadata ? EsqlExecutionInfo.IncludeExecutionMetadata.ALWAYS : EsqlExecutionInfo.IncludeExecutionMetadata.NEVER ) ); if (remote && local) { assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER_1, LOCAL_CLUSTER))); } else if (remote) { assertThat(executionInfo.clusterAliases(), equalTo(Set.of(REMOTE_CLUSTER_1))); } else { assertThat(executionInfo.clusterAliases(), equalTo(Set.of(LOCAL_CLUSTER))); } if (remote) { EsqlExecutionInfo.Cluster remoteCluster = executionInfo.getCluster(REMOTE_CLUSTER_1); assertClusterInfoSuccess(remoteCluster, remoteNumShards, overallTookMillis); } if (local) { EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(LOCAL_CLUSTER); assertClusterInfoSuccess(localCluster, localNumShards, overallTookMillis); } assertClusterMetadataInResponse(resp, includeMetadata); } } private static void assertClusterMetadataInResponse(EsqlQueryResponse resp, boolean present) { try { final Map<String, Object> esqlResponseAsMap = XContentTestUtils.convertToMap(resp); final Object clusters = esqlResponseAsMap.get("_clusters"); if (present) { assertNotNull(clusters); // test a few entries to ensure it looks correct (other tests do a full analysis of the metadata in the response) @SuppressWarnings("unchecked") Map<String, Object> inner = (Map<String, Object>) clusters; assertTrue(inner.containsKey("total")); assertTrue(inner.containsKey("details")); } else { assertNull(clusters); } } catch (IOException e) { fail("Could not convert ESQL response to Map: " + e); } } Map<String, Object> setupTwoClusters() throws 
IOException { return setupClusters(2); } }
QueryExecutionMetadataIT
java
apache__flink
flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/LocalInputPreferredSlotSharingStrategyTest.java
{ "start": 2698, "end": 15536 }
class ____ extends AbstractSlotSharingStrategyTest { @RegisterExtension private static final TestExecutorExtension<ScheduledExecutorService> EXECUTOR_EXTENSION = TestingUtils.defaultExecutorExtension(); private final JobVertexID jobVertexId3 = new JobVertexID(); private TestingSchedulingExecutionVertex ev11; private TestingSchedulingExecutionVertex ev12; private TestingSchedulingExecutionVertex ev21; private TestingSchedulingExecutionVertex ev22; private TestingSchedulingExecutionVertex ev23; @Override protected SlotSharingStrategy getSlotSharingStrategy( SchedulingTopology topology, Set<SlotSharingGroup> slotSharingGroups, Set<CoLocationGroup> coLocationGroups) { return new LocalInputPreferredSlotSharingStrategy( topology, slotSharingGroups, coLocationGroups); } @Test void testInputLocalityIsRespectedWithRescaleEdge() { createTwoExeVerticesPerJv1AndJv2(slotSharingGroup); ev23 = topology.newExecutionVertex(jobVertexId2, 2); topology.connect(ev11, ev21); topology.connect(ev11, ev22); topology.connect(ev12, ev23); final SlotSharingStrategy strategy = getSlotSharingStrategy( topology, Sets.newHashSet(slotSharingGroup), Collections.emptySet()); assertThat(strategy.getExecutionSlotSharingGroups()).hasSize(3); assertThat(strategy.getExecutionSlotSharingGroup(ev21.getId()).getExecutionVertexIds()) .contains(ev11.getId(), ev21.getId()); assertThat(strategy.getExecutionSlotSharingGroup(ev22.getId()).getExecutionVertexIds()) .contains(ev22.getId()); assertThat(strategy.getExecutionSlotSharingGroup(ev23.getId()).getExecutionVertexIds()) .contains(ev12.getId(), ev23.getId()); } private void createTwoExeVerticesPerJv1AndJv2(SlotSharingGroup sharingGroup) { ev11 = topology.newExecutionVertex(jobVertexId1, 0); ev12 = topology.newExecutionVertex(jobVertexId1, 1); ev21 = topology.newExecutionVertex(jobVertexId2, 0); ev22 = topology.newExecutionVertex(jobVertexId2, 1); sharingGroup.addVertexToGroup(jobVertexId1); sharingGroup.addVertexToGroup(jobVertexId2); } @Test void 
testInputLocalityIsRespectedWithAllToAllEdge() { slotSharingGroup.addVertexToGroup(jobVertexId1); slotSharingGroup.addVertexToGroup(jobVertexId2); final List<TestingSchedulingExecutionVertex> producer = topology.addExecutionVertices() .withParallelism(2) .withJobVertexID(jobVertexId1) .finish(); final List<TestingSchedulingExecutionVertex> consumer = topology.addExecutionVertices() .withParallelism(2) .withJobVertexID(jobVertexId2) .finish(); topology.connectAllToAll(producer, consumer) .withResultPartitionType(ResultPartitionType.BLOCKING) .finish(); ev11 = producer.get(0); ev12 = producer.get(1); ev21 = consumer.get(0); ev22 = consumer.get(1); final SlotSharingStrategy strategy = getSlotSharingStrategy( topology, Sets.newHashSet(slotSharingGroup), Collections.emptySet()); assertThat(strategy.getExecutionSlotSharingGroups()).hasSize(2); assertThat(strategy.getExecutionSlotSharingGroup(ev21.getId()).getExecutionVertexIds()) .contains(ev11.getId(), ev21.getId()); assertThat(strategy.getExecutionSlotSharingGroup(ev22.getId()).getExecutionVertexIds()) .contains(ev12.getId(), ev22.getId()); } @Test void testCoLocationConstraintIsRespected() { List<Tuple2<JobVertexID, List<TestingSchedulingExecutionVertex>>> jobVertexInfos = new ArrayList<>(); CoLocationGroup coLocationGroup1 = new CoLocationGroupImpl(); CoLocationGroup coLocationGroup2 = new CoLocationGroupImpl(); List<TestingJobVertexInfo> mockedJobVertices = Lists.newArrayList( new TestingJobVertexInfo(1, slotSharingGroup, null), new TestingJobVertexInfo(2, slotSharingGroup, coLocationGroup1), new TestingJobVertexInfo(2, slotSharingGroup, coLocationGroup1), new TestingJobVertexInfo(3, slotSharingGroup, coLocationGroup2), new TestingJobVertexInfo(3, slotSharingGroup, coLocationGroup2)); renderTopology(topology, mockedJobVertices, jobVertexInfos); final SlotSharingStrategy strategy = getSlotSharingStrategy( topology, Sets.newHashSet(slotSharingGroup), Sets.newHashSet(coLocationGroup1, coLocationGroup2)); 
List<TestingSchedulingExecutionVertex> executionVertices1 = jobVertexInfos.get(1).f1; List<TestingSchedulingExecutionVertex> executionVertices2 = jobVertexInfos.get(2).f1; assertThat(executionVertices1).hasSameSizeAs(executionVertices2); for (int i = 0; i < executionVertices1.size(); i++) { ExecutionSlotSharingGroup executionSlotSharingGroup = strategy.getExecutionSlotSharingGroup(executionVertices1.get(i).getId()); assertThat(executionSlotSharingGroup) .isEqualTo( strategy.getExecutionSlotSharingGroup( executionVertices2.get(i).getId())); } List<TestingSchedulingExecutionVertex> executionVertices3 = jobVertexInfos.get(3).f1; List<TestingSchedulingExecutionVertex> executionVertices4 = jobVertexInfos.get(4).f1; assertThat(executionVertices3).hasSameSizeAs(executionVertices4); for (int i = 0; i < executionVertices3.size(); i++) { assertThat(strategy.getExecutionSlotSharingGroup(executionVertices3.get(i).getId())) .isEqualTo( strategy.getExecutionSlotSharingGroup( executionVertices4.get(i).getId())); } } @Test void testDisjointVerticesInOneGroup() { createTwoExeVerticesPerJv1AndJv2(slotSharingGroup); final SlotSharingStrategy strategy = getSlotSharingStrategy( topology, Sets.newHashSet(slotSharingGroup), Collections.emptySet()); assertThat(strategy.getExecutionSlotSharingGroups()).hasSize(2); assertThat(strategy.getExecutionSlotSharingGroup(ev11.getId()).getExecutionVertexIds()) .contains(ev11.getId(), ev21.getId()); assertThat(strategy.getExecutionSlotSharingGroup(ev12.getId()).getExecutionVertexIds()) .contains(ev12.getId(), ev22.getId()); } @Test void testVerticesInDifferentSlotSharingGroups() { ev11 = topology.newExecutionVertex(jobVertexId1, 0); ev12 = topology.newExecutionVertex(jobVertexId1, 1); ev21 = topology.newExecutionVertex(jobVertexId2, 0); ev22 = topology.newExecutionVertex(jobVertexId2, 1); slotSharingGroup1.addVertexToGroup(jobVertexId1); slotSharingGroup2.addVertexToGroup(jobVertexId2); final SlotSharingStrategy strategy = getSlotSharingStrategy( 
topology, Sets.newHashSet(slotSharingGroup1, slotSharingGroup2), Collections.emptySet()); assertThat(strategy.getExecutionSlotSharingGroups()).hasSize(4); assertThat(strategy.getExecutionSlotSharingGroup(ev11.getId()).getExecutionVertexIds()) .contains(ev11.getId()); assertThat(strategy.getExecutionSlotSharingGroup(ev12.getId()).getExecutionVertexIds()) .contains(ev12.getId()); assertThat(strategy.getExecutionSlotSharingGroup(ev21.getId()).getExecutionVertexIds()) .contains(ev21.getId()); assertThat(strategy.getExecutionSlotSharingGroup(ev22.getId()).getExecutionVertexIds()) .contains(ev22.getId()); } /** * In this test case, there are two JobEdges between two JobVertices. There will be no * ExecutionSlotSharingGroup that contains two vertices with the same JobVertexID. */ @Test void testInputLocalityIsRespectedWithTwoEdgesBetweenTwoVertices() throws Exception { createTwoExeVerticesPerJv1AndJv2(slotSharingGroup); int parallelism = 4; JobVertex v1 = createJobVertex("v1", jobVertexId1, parallelism); JobVertex v2 = createJobVertex("v2", jobVertexId2, parallelism); connectNewDataSetAsInput( v2, v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.BLOCKING); connectNewDataSetAsInput( v2, v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.BLOCKING); assertThat(v1.getProducedDataSets()).hasSize(2); assertThat(v2.getInputs()).hasSize(2); final JobGraph jobGraph = JobGraphTestUtils.batchJobGraph(v1, v2); final ExecutionGraph executionGraph = TestingDefaultExecutionGraphBuilder.newBuilder() .setJobGraph(jobGraph) .build(EXECUTOR_EXTENSION.getExecutor()); final SchedulingTopology topology = executionGraph.getSchedulingTopology(); final SlotSharingStrategy strategy = getSlotSharingStrategy( topology, Sets.newHashSet(slotSharingGroup), Collections.emptySet()); assertThat(strategy.getExecutionSlotSharingGroups()).hasSize(4); ExecutionVertex[] ev1 = Objects.requireNonNull(executionGraph.getJobVertex(jobVertexId1)).getTaskVertices(); ExecutionVertex[] ev2 = 
Objects.requireNonNull(executionGraph.getJobVertex(jobVertexId2)).getTaskVertices(); for (int i = 0; i < parallelism; i++) { assertThat( strategy.getExecutionSlotSharingGroup(ev1[i].getID()) .getExecutionVertexIds()) .contains(ev1[i].getID(), ev2[i].getID()); } } @Test void testGetExecutionSlotSharingGroupOfLateAttachedVertices() { slotSharingGroup1.addVertexToGroup(jobVertexId1); slotSharingGroup1.addVertexToGroup(jobVertexId2); slotSharingGroup2.addVertexToGroup(jobVertexId3); TestingSchedulingExecutionVertex ev1 = topology.newExecutionVertex(jobVertexId1, 0); TestingSchedulingExecutionVertex ev2 = topology.newExecutionVertex(jobVertexId2, 0); topology.connect(ev1, ev2); final LocalInputPreferredSlotSharingStrategy strategy = (LocalInputPreferredSlotSharingStrategy) getSlotSharingStrategy( topology, new HashSet<>(Arrays.asList(slotSharingGroup1, slotSharingGroup2)), Collections.emptySet()); assertThat(strategy.getExecutionSlotSharingGroups()).hasSize(1); assertThat(strategy.getExecutionSlotSharingGroup(ev1.getId()).getExecutionVertexIds()) .contains(ev1.getId(), ev2.getId()); assertThat(strategy.getExecutionSlotSharingGroup(ev2.getId()).getExecutionVertexIds()) .contains(ev1.getId(), ev2.getId()); // add new job vertices and notify scheduling topology updated TestingSchedulingExecutionVertex ev3 = topology.newExecutionVertex(jobVertexId3, 0); topology.connect(ev2, ev3, ResultPartitionType.BLOCKING); strategy.notifySchedulingTopologyUpdated(topology, Collections.singletonList(ev3.getId())); assertThat(strategy.getExecutionSlotSharingGroups()).hasSize(2); assertThat(strategy.getExecutionSlotSharingGroup(ev1.getId()).getExecutionVertexIds()) .contains(ev1.getId(), ev2.getId()); assertThat(strategy.getExecutionSlotSharingGroup(ev2.getId()).getExecutionVertexIds()) .contains(ev1.getId(), ev2.getId()); assertThat(strategy.getExecutionSlotSharingGroup(ev3.getId()).getExecutionVertexIds()) .contains(ev3.getId()); } private static JobVertex createJobVertex( String 
vertexName, JobVertexID vertexId, int parallelism) { JobVertex jobVertex = new JobVertex(vertexName, vertexId); jobVertex.setParallelism(parallelism); jobVertex.setInvokableClass(NoOpInvokable.class); return jobVertex; } }
LocalInputPreferredSlotSharingStrategyTest
java
apache__logging-log4j2
log4j-core/src/main/java/org/apache/logging/log4j/core/script/ScriptManager.java
{ "start": 8885, "end": 11090 }
class ____ extends AbstractScriptRunner { private final AbstractScript script; private final CompiledScript compiledScript; private final ScriptEngine scriptEngine; public MainScriptRunner(final ScriptEngine scriptEngine, final AbstractScript script) { this.script = script; this.scriptEngine = scriptEngine; CompiledScript compiled = null; if (scriptEngine instanceof Compilable) { logger.debug("Script {} is compilable", script.getId()); compiled = AccessController.doPrivileged((PrivilegedAction<CompiledScript>) () -> { try { return ((Compilable) scriptEngine).compile(script.getScriptText()); } catch (final Throwable ex) { /* * ScriptException is what really should be caught here. However, beanshell's ScriptEngine * implements Compilable but then throws Error when the compile method is called! */ logger.warn("Error compiling script", ex); return null; } }); } compiledScript = compiled; } @Override public ScriptEngine getScriptEngine() { return this.scriptEngine; } @Override public Object execute(final Bindings bindings) { if (compiledScript != null) { try { return compiledScript.eval(bindings); } catch (final ScriptException ex) { logger.error("Error running script " + script.getId(), ex); return null; } } try { return scriptEngine.eval(script.getScriptText(), bindings); } catch (final ScriptException ex) { logger.error("Error running script " + script.getId(), ex); return null; } } @Override public AbstractScript getScript() { return script; } } private
MainScriptRunner
java
quarkusio__quarkus
extensions/redis-cache/deployment/src/test/java/io/quarkus/cache/redis/deployment/BasicRedisCacheTest.java
{ "start": 753, "end": 6387 }
class ____ { private static final String KEY_1 = "1"; private static final String KEY_2 = "2"; @RegisterExtension static final QuarkusUnitTest TEST = new QuarkusUnitTest() .withApplicationRoot(jar -> jar.addClasses(SimpleCachedService.class, TestUtil.class)); @Inject SimpleCachedService simpleCachedService; @Test public void testTypes() { CacheManager cacheManager = Arc.container().select(CacheManager.class).get(); assertNotNull(cacheManager); Optional<Cache> cacheOpt = cacheManager.getCache(SimpleCachedService.CACHE_NAME); assertTrue(cacheOpt.isPresent()); Cache cache = cacheOpt.get(); assertTrue(cache instanceof RedisCache); } @Test public void testAllCacheAnnotations() { RedisDataSource redisDataSource = Arc.container().select(RedisDataSource.class).get(); List<String> allKeysAtStart = TestUtil.allRedisKeys(redisDataSource); // STEP 1 // Action: @CacheResult-annotated method call. // Expected effect: method invoked and result cached. // Verified by: STEP 2. String value1 = simpleCachedService.cachedMethod(KEY_1); List<String> newKeys = TestUtil.allRedisKeys(redisDataSource); assertEquals(allKeysAtStart.size() + 1, newKeys.size(), "Compared " + allKeysAtStart + " and " + newKeys); Assertions.assertThat(newKeys).contains(expectedCacheKey(KEY_1)); // STEP 2 // Action: same call as STEP 1. // Expected effect: method not invoked and result coming from the cache. // Verified by: same object reference between STEPS 1 and 2 results. String value2 = simpleCachedService.cachedMethod(KEY_1); assertEquals(value1, value2); assertEquals(allKeysAtStart.size() + 1, TestUtil.allRedisKeys(redisDataSource).size()); // STEP 3 // Action: same call as STEP 2 with a new key. // Expected effect: method invoked and result cached. // Verified by: different objects references between STEPS 2 and 3 results. 
String value3 = simpleCachedService.cachedMethod(KEY_2); assertNotEquals(value2, value3); newKeys = TestUtil.allRedisKeys(redisDataSource); assertEquals(allKeysAtStart.size() + 2, newKeys.size()); Assertions.assertThat(newKeys).contains(expectedCacheKey(KEY_1), expectedCacheKey(KEY_2)); // STEP 4 // Action: cache entry invalidation. // Expected effect: STEP 2 cache entry removed. // Verified by: STEP 5. simpleCachedService.invalidate(KEY_1); newKeys = TestUtil.allRedisKeys(redisDataSource); assertEquals(allKeysAtStart.size() + 1, newKeys.size()); Assertions.assertThat(newKeys).contains(expectedCacheKey(KEY_2)).doesNotContain(expectedCacheKey(KEY_1)); // STEP 5 // Action: same call as STEP 2. // Expected effect: method invoked because of STEP 4 and result cached. // Verified by: different objects references between STEPS 2 and 5 results. String value5 = simpleCachedService.cachedMethod(KEY_1); assertNotEquals(value2, value5); newKeys = TestUtil.allRedisKeys(redisDataSource); assertEquals(allKeysAtStart.size() + 2, newKeys.size()); Assertions.assertThat(newKeys).contains(expectedCacheKey(KEY_1), expectedCacheKey(KEY_2)); // STEP 6 // Action: same call as STEP 3. // Expected effect: method not invoked and result coming from the cache. // Verified by: same object reference between STEPS 3 and 6 results. String value6 = simpleCachedService.cachedMethod(KEY_2); assertEquals(value3, value6); assertEquals(allKeysAtStart.size() + 2, TestUtil.allRedisKeys(redisDataSource).size()); // STEP 7 // Action: add 100 cached keys, to make sure the SCAN command in next step requires multiple iterations // Expected effect: + 100 keys in Redis // Verified by: comparison with previous number of keys for (int i = 0; i < 100; i++) { simpleCachedService.cachedMethod("extra-" + i); } assertEquals(allKeysAtStart.size() + 102, TestUtil.allRedisKeys(redisDataSource).size()); // STEP 8 // Action: full cache invalidation. // Expected effect: empty cache. 
// Verified by: comparison with previous number of keys, STEPS 9 and 10. simpleCachedService.invalidateAll(); newKeys = TestUtil.allRedisKeys(redisDataSource); assertEquals(allKeysAtStart.size(), newKeys.size(), "Compared " + allKeysAtStart + " and " + newKeys); Assertions.assertThat(newKeys).doesNotContain(expectedCacheKey(KEY_1), expectedCacheKey(KEY_2)); // STEP 9 // Action: same call as STEP 5. // Expected effect: method invoked because of STEP 8 and result cached. // Verified by: different objects references between STEPS 5 and 9 results. String value9 = simpleCachedService.cachedMethod(KEY_1); assertNotEquals(value5, value9); // STEP 10 // Action: same call as STEP 6. // Expected effect: method invoked because of STEP 8 and result cached. // Verified by: different objects references between STEPS 6 and 10 results. String value10 = simpleCachedService.cachedMethod(KEY_2); assertNotEquals(value6, value10); } private static String expectedCacheKey(String key) { return "cache:" + SimpleCachedService.CACHE_NAME + ":" + key; } }
BasicRedisCacheTest
java
apache__flink
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/catalog/UnknownCatalogTest.java
{ "start": 2152, "end": 12123 }
class ____ { public static final String BUILTIN_CATALOG = "cat"; private static final String BUILTIN_DATABASE = "db"; public static final EnvironmentSettings ENVIRONMENT_SETTINGS = EnvironmentSettings.newInstance() .inStreamingMode() .withBuiltInCatalogName(BUILTIN_CATALOG) .withBuiltInDatabaseName(BUILTIN_DATABASE) .build(); public static final ResolvedSchema EXPECTED_SCHEMA = ResolvedSchema.of(Column.physical("i", INT()), Column.physical("s", STRING())); public static final ResolvedSchema CURRENT_TIMESTAMP_EXPECTED_SCHEMA = ResolvedSchema.of(Column.physical("CURRENT_TIMESTAMP", TIMESTAMP_LTZ(3).notNull())); @Test void testUnsetCatalogWithSelectCurrentTimestamp() { TableEnvironment tEnv = TableEnvironment.create(ENVIRONMENT_SETTINGS); tEnv.useCatalog(null); Table table = tEnv.sqlQuery("SELECT CURRENT_TIMESTAMP"); assertThat(table.getResolvedSchema()).isEqualTo(CURRENT_TIMESTAMP_EXPECTED_SCHEMA); } @Test void testSetCatalogUnsetDatabaseWithSelectCurrentTimestamp() { TableEnvironment tEnv = TableEnvironment.create(ENVIRONMENT_SETTINGS); tEnv.useCatalog(BUILTIN_CATALOG); tEnv.useDatabase(null); Table table = tEnv.sqlQuery("SELECT CURRENT_TIMESTAMP"); assertThat(table.getResolvedSchema()).isEqualTo(CURRENT_TIMESTAMP_EXPECTED_SCHEMA); } @Test void testSetCatalogWithSelectCurrentTimestamp() { TableEnvironment tEnv = TableEnvironment.create(ENVIRONMENT_SETTINGS); tEnv.useCatalog(BUILTIN_CATALOG); Table table = tEnv.sqlQuery("SELECT CURRENT_TIMESTAMP"); assertThat(table.getResolvedSchema()).isEqualTo(CURRENT_TIMESTAMP_EXPECTED_SCHEMA); } @Test void testUnsetCatalogWithShowFunctions() { TableEnvironment tEnv = TableEnvironment.create(ENVIRONMENT_SETTINGS); tEnv.useCatalog(null); TableResult table = tEnv.executeSql("SHOW FUNCTIONS"); final List<Row> functions = CollectionUtil.iteratorToList(table.collect()); // check it has some built-in functions assertThat(functions).hasSizeGreaterThan(0); } @ParameterizedTest(name = "{index}: {0}") @ValueSource( strings = { "SHOW 
TABLES", "SHOW TABLES IN db", "SHOW VIEWS", "SHOW VIEWS IN db", "SHOW PROCEDURES", "SHOW PROCEDURES IN db", "SHOW COLUMNS IN db", "SHOW DATABASES", "SHOW MATERIALIZED TABLES", "SHOW MATERIALIZED TABLES IN db" }) void showForUnsetCatalog(String sql) { TableEnvironment tEnv = TableEnvironment.create(ENVIRONMENT_SETTINGS); tEnv.useCatalog(null); assertThatThrownBy(() -> tEnv.executeSql(sql)) .isInstanceOf(ValidationException.class) .hasMessageContaining("A current catalog has not been set"); } @ParameterizedTest(name = "{index}: {0}") @ValueSource( strings = { "SHOW TABLES", "SHOW MATERIALIZED TABLES", "SHOW VIEWS", "SHOW PROCEDURES", // Here `db` is considered as object name "SHOW COLUMNS IN db" }) void showForUnsetDatabase(String sql) { TableEnvironment tEnv = TableEnvironment.create(ENVIRONMENT_SETTINGS); tEnv.useCatalog("cat"); tEnv.useDatabase(null); assertThatThrownBy(() -> tEnv.executeSql(sql)) .isInstanceOf(ValidationException.class) .hasMessageContaining("A current database has not been set"); } @Test void testUnsetCatalogWithFullyQualified() { TableEnvironment tEnv = TableEnvironment.create(ENVIRONMENT_SETTINGS); tEnv.useCatalog(null); final String tablePath = String.format("%s.%s.%s", BUILTIN_CATALOG, BUILTIN_DATABASE, "tb"); registerTable(tEnv, tablePath); Table table = tEnv.sqlQuery(String.format("SELECT * FROM %s", tablePath)); assertThat(table.getResolvedSchema()).isEqualTo(EXPECTED_SCHEMA); } @Test void testUnsetCatalogWithSingleIdentifier() { TableEnvironment tEnv = TableEnvironment.create(ENVIRONMENT_SETTINGS); tEnv.useCatalog(null); final String tableName = "tb"; final String tablePath = String.format("%s.%s.%s", BUILTIN_CATALOG, BUILTIN_DATABASE, tableName); registerTable(tEnv, tablePath); assertThatThrownBy(() -> tEnv.sqlQuery("SELECT * FROM " + tableName)) .isInstanceOf(ValidationException.class) .hasMessageContaining(String.format("Object '%s' not found", tableName)); } @Test void testUsingUnknownDatabaseWithDatabaseQualified() { 
TableEnvironment tEnv = TableEnvironment.create(ENVIRONMENT_SETTINGS); tEnv.useDatabase(null); final String tableName = "tb"; final String tablePath = String.format("%s.%s.%s", BUILTIN_CATALOG, BUILTIN_DATABASE, tableName); registerTable(tEnv, tablePath); Table table = tEnv.sqlQuery(String.format("SELECT * FROM %s.%s", BUILTIN_DATABASE, tableName)); assertThat(table.getResolvedSchema()).isEqualTo(EXPECTED_SCHEMA); } @Test void testUsingUnknownDatabaseWithSingleIdentifier() { TableEnvironment tEnv = TableEnvironment.create(ENVIRONMENT_SETTINGS); tEnv.useDatabase(null); final String tableName = "tb"; final String tablePath = String.format("%s.%s.%s", BUILTIN_CATALOG, BUILTIN_DATABASE, tableName); registerTable(tEnv, tablePath); assertThatThrownBy(() -> tEnv.sqlQuery("SELECT * FROM " + tableName)) .isInstanceOf(ValidationException.class) .hasMessageContaining(String.format("Object '%s' not found", tableName)); } @Test void testUnsetCatalogWithAlterTable() { TableEnvironment tEnv = TableEnvironment.create(ENVIRONMENT_SETTINGS); tEnv.useCatalog(null); final String tableName = "tb"; final String tablePath = String.format("%s.%s.%s", BUILTIN_CATALOG, BUILTIN_DATABASE, tableName); registerTable(tEnv, tablePath); assertThatThrownBy( () -> tEnv.executeSql( String.format("ALTER TABLE %s ADD (f STRING)", tableName))) .isInstanceOf(ValidationException.class) .hasMessage( "A current catalog has not been set. 
Please use a fully qualified" + " identifier (such as 'my_catalog.my_database.my_table') or set a" + " current catalog using 'USE CATALOG my_catalog'."); } @Test void testUnsetDatabaseWithAlterTable() { TableEnvironment tEnv = TableEnvironment.create(ENVIRONMENT_SETTINGS); tEnv.useDatabase(null); final String tableName = "tb"; final String tablePath = String.format("%s.%s.%s", BUILTIN_CATALOG, BUILTIN_DATABASE, tableName); registerTable(tEnv, tablePath); assertThatThrownBy( () -> tEnv.executeSql( String.format("ALTER TABLE %s ADD (f STRING)", tableName))) .isInstanceOf(ValidationException.class) .hasMessage( "A current database has not been set. Please use a fully qualified" + " identifier (such as 'my_database.my_table' or" + " 'my_catalog.my_database.my_table') or set a current database" + " using 'USE my_database'."); } @Test void testUnsetDatabaseComingFromCatalogWithAlterTable() throws Exception { TableEnvironment tEnv = TableEnvironment.create(ENVIRONMENT_SETTINGS); final String catalogName = "custom"; final NullDefaultDatabaseCatalog catalog = new NullDefaultDatabaseCatalog(catalogName); catalog.createDatabase( BUILTIN_DATABASE, new CatalogDatabaseImpl(Collections.emptyMap(), null), false); tEnv.registerCatalog(catalogName, catalog); tEnv.useCatalog(catalogName); final String tableName = "tb"; final String tablePath = String.format("%s.%s.%s", catalogName, BUILTIN_DATABASE, tableName); registerTable(tEnv, tablePath); assertThatThrownBy( () -> tEnv.executeSql( String.format("ALTER TABLE %s ADD (f STRING)", tableName))) .isInstanceOf(ValidationException.class) .hasMessage( "A current database has not been set. 
Please use a fully qualified" + " identifier (such as 'my_database.my_table' or" + " 'my_catalog.my_database.my_table') or set a current database" + " using 'USE my_database'."); } private static void registerTable(TableEnvironment tEnv, String tableName) { final String input1DataId = TestValuesTableFactory.registerData(Arrays.asList(Row.of(1, "a"), Row.of(2, "b"))); tEnv.createTable( tableName, TableDescriptor.forConnector("values") .option("data-id", input1DataId) .schema(Schema.newBuilder().fromResolvedSchema(EXPECTED_SCHEMA).build()) .build()); } private static
UnknownCatalogTest
java
apache__rocketmq
client/src/main/java/org/apache/rocketmq/client/producer/TransactionSendResult.java
{ "start": 855, "end": 1269 }
class ____ extends SendResult { private LocalTransactionState localTransactionState; public TransactionSendResult() { } public LocalTransactionState getLocalTransactionState() { return localTransactionState; } public void setLocalTransactionState(LocalTransactionState localTransactionState) { this.localTransactionState = localTransactionState; } }
TransactionSendResult
java
micronaut-projects__micronaut-core
test-suite-groovy/src/test/groovy/io/micronaut/docs/config/env/HighRateLimit.java
{ "start": 74, "end": 212 }
class ____ extends RateLimit { public HighRateLimit(Duration period, Integer limit) { super(period, limit); } }
HighRateLimit
java
hibernate__hibernate-orm
hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/identity/CUBRIDIdentityColumnSupport.java
{ "start": 252, "end": 832 }
class ____ extends IdentityColumnSupportImpl { public static final CUBRIDIdentityColumnSupport INSTANCE = new CUBRIDIdentityColumnSupport(); @Override public boolean supportsIdentityColumns() { return true; } @Override public String getIdentityInsertString() { return "NULL"; } @Override public String getIdentitySelectString(String table, String column, int type) { return "select last_insert_id()"; } @Override public String getIdentityColumnString(int type) { //starts with 1, implicitly return "not null auto_increment"; } }
CUBRIDIdentityColumnSupport
java
spring-projects__spring-boot
module/spring-boot-data-elasticsearch-test/src/dockerTest/java/org/springframework/boot/data/elasticsearch/test/autoconfigure/DataElasticsearchTestIntegrationTests.java
{ "start": 1957, "end": 3266 }
class ____ { @Container @ServiceConnection static final ElasticsearchContainer elasticsearch = TestImage.container(ElasticsearchContainer.class); @Autowired private ElasticsearchTemplate elasticsearchTemplate; @Autowired private ExampleRepository exampleRepository; @Autowired private ApplicationContext applicationContext; @Test void didNotInjectExampleService() { assertThatExceptionOfType(NoSuchBeanDefinitionException.class) .isThrownBy(() -> this.applicationContext.getBean(ExampleService.class)); } @Test void testRepository() { ExampleDocument document = new ExampleDocument(); document.setText("Look, new @DataElasticsearchTest!"); String id = UUID.randomUUID().toString(); document.setId(id); ExampleDocument savedDocument = this.exampleRepository.save(document); ExampleDocument getDocument = this.elasticsearchTemplate.get(id, ExampleDocument.class); assertThat(getDocument).isNotNull(); assertThat(getDocument.getId()).isNotNull(); assertThat(getDocument.getId()).isEqualTo(savedDocument.getId()); this.exampleRepository.deleteAll(); } @Test void serviceConnectionAutoConfigurationWasImported() { assertThat(this.applicationContext).has(importedAutoConfiguration(ServiceConnectionAutoConfiguration.class)); } }
DataElasticsearchTestIntegrationTests
java
apache__camel
components/camel-mongodb-gridfs/src/generated/java/org/apache/camel/component/mongodb/gridfs/GridFsComponentConfigurer.java
{ "start": 741, "end": 2666 }
class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter { @Override public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) { GridFsComponent target = (GridFsComponent) obj; switch (ignoreCase ? name.toLowerCase() : name) { case "autowiredenabled": case "autowiredEnabled": target.setAutowiredEnabled(property(camelContext, boolean.class, value)); return true; case "bridgeerrorhandler": case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true; case "lazystartproducer": case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true; default: return false; } } @Override public Class<?> getOptionType(String name, boolean ignoreCase) { switch (ignoreCase ? name.toLowerCase() : name) { case "autowiredenabled": case "autowiredEnabled": return boolean.class; case "bridgeerrorhandler": case "bridgeErrorHandler": return boolean.class; case "lazystartproducer": case "lazyStartProducer": return boolean.class; default: return null; } } @Override public Object getOptionValue(Object obj, String name, boolean ignoreCase) { GridFsComponent target = (GridFsComponent) obj; switch (ignoreCase ? name.toLowerCase() : name) { case "autowiredenabled": case "autowiredEnabled": return target.isAutowiredEnabled(); case "bridgeerrorhandler": case "bridgeErrorHandler": return target.isBridgeErrorHandler(); case "lazystartproducer": case "lazyStartProducer": return target.isLazyStartProducer(); default: return null; } } }
GridFsComponentConfigurer
java
apache__hadoop
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HostRestrictingAuthorizationFilter.java
{ "start": 13700, "end": 15276 }
class ____ implements HttpInteraction { private final FilterChain chain; private final HttpServletRequest httpRequest; private final HttpServletResponse httpResponse; /* * Creates a new ServletFilterHttpInteraction. * * @param httpRequest request to process * @param httpResponse response to process * @param chain filter chain to forward to if HTTP interaction is allowed */ public ServletFilterHttpInteraction(HttpServletRequest httpRequest, HttpServletResponse httpResponse, FilterChain chain) { this.httpRequest = httpRequest; this.httpResponse = httpResponse; this.chain = chain; } @Override public boolean isCommitted() { return (httpResponse.isCommitted()); } @Override public String getRemoteAddr() { return (httpRequest.getRemoteAddr()); } @Override public String getRemoteUser() { return (httpRequest.getRemoteUser()); } @Override public String getRequestURI() { return (httpRequest.getRequestURI()); } @Override public String getQueryString() { return (httpRequest.getQueryString()); } @Override public String getMethod() { return httpRequest.getMethod(); } @Override public void proceed() throws IOException, ServletException { chain.doFilter(httpRequest, httpResponse); } @Override public void sendError(int code, String message) throws IOException { httpResponse.sendError(code, message); } } }
ServletFilterHttpInteraction
java
mapstruct__mapstruct
processor/src/test/java/org/mapstruct/ap/test/bugs/_3089/domain/ImmutableItem.java
{ "start": 4787, "end": 10252 }
// Immutables-style builder for Item: tracks the required "id" attribute via an
// init-bits mask, accumulates "attributes" entries with eager null checks, and
// build() throws a descriptive IllegalStateException while required attributes
// are still unset. The trailing createUnmodifiableMap helper (a member of the
// enclosing ImmutableItem class) snapshots a map with optional null
// checking/skipping, special-casing sizes 0 and 1.
// NOTE(review): generated code collapsed onto three physical lines, split
// mid-signature between lines; left byte-identical below.
class ____ { private static final long INIT_BIT_ID = 0x1L; private long initBits = 0x1L; private String id; private Map<String, String> attributes = new LinkedHashMap<String, String>(); public Builder() { } /** * Fill a builder with attribute values from the provided {@code Item} instance. * Regular attribute values will be replaced with those from the given instance. * Absent optional values will not replace present values. * Collection elements and entries will be added, not replaced. * @param instance The instance from which to copy values * @return {@code this} builder for use in a chained invocation */ public final Builder from(Item instance) { Objects.requireNonNull(instance, "instance"); id(instance.getId()); putAllAttributes(instance.getAttributes()); return this; } /** * Initializes the value for the {@link Item#getId() id} attribute. * @param id The value for id * @return {@code this} builder for use in a chained invocation */ public final Builder id(String id) { this.id = Objects.requireNonNull(id, "id"); initBits &= ~INIT_BIT_ID; return this; } /** * Put one entry to the {@link Item#getAttributes() attributes} map. * @param key The key in the attributes map * @param value The associated value in the attributes map * @return {@code this} builder for use in a chained invocation */ public final Builder putAttributes(String key, String value) { this.attributes.put( Objects.requireNonNull(key, "attributes key"), Objects.requireNonNull(value, "attributes value")); return this; } /** * Put one entry to the {@link Item#getAttributes() attributes} map. Nulls are not permitted * @param entry The key and value entry * @return {@code this} builder for use in a chained invocation */ public final Builder putAttributes(Map.Entry<String, ? 
extends String> entry) { String k = entry.getKey(); String v = entry.getValue(); this.attributes.put( Objects.requireNonNull(k, "attributes key"), Objects.requireNonNull(v, "attributes value")); return this; } /** * Sets or replaces all mappings from the specified map as entries for the {@link Item#getAttributes() attributes} map. Nulls are not permitted * @param entries The entries that will be added to the attributes map * @return {@code this} builder for use in a chained invocation */ public final Builder attributes(Map<String, ? extends String> entries) { this.attributes.clear(); return putAllAttributes(entries); } /** * Put all mappings from the specified map as entries to {@link Item#getAttributes() attributes} map. Nulls are not permitted * @param entries The entries that will be added to the attributes map * @return {@code this} builder for use in a chained invocation */ public final Builder putAllAttributes(Map<String, ? extends String> entries) { for (Map.Entry<String, ? extends String> e : entries.entrySet()) { String k = e.getKey(); String v = e.getValue(); this.attributes.put( Objects.requireNonNull(k, "attributes key"), Objects.requireNonNull(v, "attributes value")); } return this; } /** * Builds a new {@link ImmutableItem ImmutableItem}. * @return An immutable instance of Item * @throws java.lang.IllegalStateException if any required attributes are missing */ public ImmutableItem build() { if (initBits != 0) { throw new IllegalStateException(formatRequiredAttributesMessage()); } return new ImmutableItem(id, createUnmodifiableMap(false, false, attributes)); } private String formatRequiredAttributesMessage() { List<String> attributes = new ArrayList<>(); if ((initBits & INIT_BIT_ID) != 0) attributes.add("id"); return "Cannot build Item, some of required attributes are not set " + attributes; } } private static <K, V> Map<K, V> createUnmodifiableMap(boolean checkNulls, boolean skipNulls, Map<? extends K, ? 
extends V> map) { switch (map.size()) { case 0: return Collections.emptyMap(); case 1: { Map.Entry<? extends K, ? extends V> e = map.entrySet().iterator().next(); K k = e.getKey(); V v = e.getValue(); if (checkNulls) { Objects.requireNonNull(k, "key"); Objects.requireNonNull(v, "value"); } if (skipNulls && (k == null || v == null)) { return Collections.emptyMap(); } return Collections.singletonMap(k, v); } default: { Map<K, V> linkedMap = new LinkedHashMap<>(map.size()); if (skipNulls || checkNulls) { for (Map.Entry<? extends K, ? extends V> e : map.entrySet()) { K k = e.getKey(); V v = e.getValue(); if (skipNulls) { if (k == null || v == null) continue; } else if (checkNulls) { Objects.requireNonNull(k, "key"); Objects.requireNonNull(v, "value"); } linkedMap.put(k, v); } } else { linkedMap.putAll(map); } return Collections.unmodifiableMap(linkedMap); } } } }
Builder
java
apache__camel
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/LumberjackEndpointBuilderFactory.java
{ "start": 8702, "end": 9120 }
interface ____ { /** * Lumberjack (camel-lumberjack) * Receive logs messages using the Lumberjack protocol. * * Category: monitoring * Since: 2.18 * Maven coordinates: org.apache.camel:camel-lumberjack * * Syntax: <code>lumberjack:host:port</code> * * Path parameter: host (required) * Network
LumberjackBuilders
java
apache__hadoop
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsVisitor.java
{ "start": 1070, "end": 2112 }
/**
 * Visitor over a metrics record: one callback is invoked per metric, chosen by
 * the metric's kind (gauge vs. counter) and primitive value type.
 */
interface ____ {

    /**
     * Callback for integer value gauges.
     * @param info the metric info
     * @param value of the metric
     */
    void gauge(MetricsInfo info, int value);

    /**
     * Callback for long value gauges.
     * @param info the metric info
     * @param value of the metric
     */
    void gauge(MetricsInfo info, long value);

    /**
     * Callback for float value gauges.
     * @param info the metric info
     * @param value of the metric
     */
    void gauge(MetricsInfo info, float value);

    /**
     * Callback for double value gauges.
     * @param info the metric info
     * @param value of the metric
     */
    void gauge(MetricsInfo info, double value);

    /**
     * Callback for integer value counters.
     * @param info the metric info
     * @param value of the metric
     */
    void counter(MetricsInfo info, int value);

    /**
     * Callback for long value counters.
     * @param info the metric info
     * @param value of the metric
     */
    void counter(MetricsInfo info, long value);
}
MetricsVisitor
java
quarkusio__quarkus
extensions/grpc/deployment/src/test/java/io/quarkus/grpc/protov2/HelloServiceV2.java
{ "start": 281, "end": 584 }
/**
 * Minimal Mutiny gRPC greeter used by the proto-v2 test: replies with
 * "hello &lt;name&gt;" for the caller-supplied name.
 */
class ____ extends MutinyGreeterGrpc.GreeterImplBase {

    @Override
    public Uni<HelloReply> sayHello(HelloRequest request) {
        String name = request.getName();
        // Lazily emit the name, then build the reply; the mapper closes over
        // `name` directly (its argument is unused, matching the original).
        Uni<String> source = Uni.createFrom().item(name);
        return source.map(ignored -> HelloReply.newBuilder().setMessage("hello " + name).build());
    }
}
HelloServiceV2
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/inheritance/mixed/SubclassTest.java
{ "start": 884, "end": 2218 }
class ____ { @Test public void testDefault(SessionFactoryScope scope) { File doc = new Document( "Enron Stuff To Shred", 1000 ); Folder folder = new Folder( "Enron" ); scope.inTransaction( session -> { session.persist( doc ); session.persist( folder ); } ); scope.inTransaction( session -> { CriteriaBuilder criteriaBuilder = session.getCriteriaBuilder(); CriteriaQuery<File> criteria = criteriaBuilder.createQuery( File.class ); criteria.from( File.class ); List<File> result = session.createQuery( criteria ).list(); // List result = session.createCriteria( File.class ).list(); assertNotNull( result ); assertEquals( 2, result.size() ); File f2 = result.get( 0 ); checkClassType( f2, doc, folder ); f2 = result.get( 1 ); checkClassType( f2, doc, folder ); session.remove( result.get( 0 ) ); session.remove( result.get( 1 ) ); } ); } private void checkClassType(File fruitToTest, File f, Folder a) { if ( fruitToTest.getName().equals( f.getName() ) ) { assertFalse( fruitToTest instanceof Folder ); } else if ( fruitToTest.getName().equals( a.getName() ) ) { assertTrue( fruitToTest instanceof Folder ); } else { fail( "Result does not contains the previously inserted elements" ); } } }
SubclassTest
java
spring-projects__spring-framework
spring-core/src/test/java/org/springframework/util/AssertTests.java
{ "start": 18764, "end": 19044 }
class ____.lang.String"); } @Test void isAssignableWithTypeMismatchAndCustomMessageWithSpace() { assertThatIllegalArgumentException().isThrownBy(() -> Assert.isAssignable(String.class, Integer.class, "Custom message for ")) .withMessageContaining("Custom message for
java
java
apache__logging-log4j2
log4j-core/src/main/java/org/apache/logging/log4j/core/config/AppenderControlArraySet.java
{ "start": 1289, "end": 5254 }
// Lock-free set of AppenderControl elements backed by a copy-on-write array:
// every mutation copies the volatile appenderArray and publishes the copy via
// a CAS on appenderArrayUpdater, retrying whenever a concurrent writer won the
// race. Reads (asMap, isEmpty, get, toString) observe the array without
// locking; clear() swaps in the empty array atomically.
// NOTE(review): collapsed onto two physical lines as stored here, so the
// embedded "//" comments swallow the remainder of each line; left
// byte-identical below.
class ____ { private static final AtomicReferenceFieldUpdater<AppenderControlArraySet, AppenderControl[]> appenderArrayUpdater = AtomicReferenceFieldUpdater.newUpdater( AppenderControlArraySet.class, AppenderControl[].class, "appenderArray"); private volatile AppenderControl[] appenderArray = AppenderControl.EMPTY_ARRAY; /** * Adds an AppenderControl to this set. If this set already contains the element, the call leaves the set unchanged * and returns false. * * @param control The AppenderControl to add. * @return true if this set did not already contain the specified element */ public boolean add(final AppenderControl control) { boolean success; do { final AppenderControl[] original = appenderArray; for (final AppenderControl existing : original) { if (existing.equals(control)) { return false; // the appender is already in the list } } final AppenderControl[] copy = Arrays.copyOf(original, original.length + 1); copy[copy.length - 1] = control; success = appenderArrayUpdater.compareAndSet(this, original, copy); } while (!success); // could not swap: array was modified by another thread return true; // successfully added } /** * Removes the AppenderControl with the specific name and returns it (or {@code null} if no such appender existed). 
* * @param name The name of the AppenderControl to remove * @return the removed AppenderControl or {@code null} */ public AppenderControl remove(final String name) { boolean success; do { success = true; final AppenderControl[] original = appenderArray; for (int i = 0; i < original.length; i++) { final AppenderControl appenderControl = original[i]; if (Objects.equals(name, appenderControl.getAppenderName())) { final AppenderControl[] copy = removeElementAt(i, original); if (appenderArrayUpdater.compareAndSet(this, original, copy)) { return appenderControl; // successfully removed } success = false; // could not swap: array was modified by another thread break; } } } while (!success); return null; // not found } private AppenderControl[] removeElementAt(final int i, final AppenderControl[] array) { final AppenderControl[] result = Arrays.copyOf(array, array.length - 1); System.arraycopy(array, i + 1, result, i, result.length - i); return result; } /** * Returns all Appenders as a Map. * * @return a Map with the Appender name as the key and the Appender as the value. */ public Map<String, Appender> asMap() { final Map<String, Appender> result = new HashMap<>(); for (final AppenderControl appenderControl : appenderArray) { result.put(appenderControl.getAppenderName(), appenderControl.getAppender()); } return result; } /** * Atomically sets the values to an empty array and returns the old array. * * @return the contents before this collection was cleared. */ public AppenderControl[] clear() { return appenderArrayUpdater.getAndSet(this, AppenderControl.EMPTY_ARRAY); } public boolean isEmpty() { return appenderArray.length == 0; } /** * Returns the underlying array. * * @return the array supporting this collection */ public AppenderControl[] get() { return appenderArray; } @Override public String toString() { return "AppenderControlArraySet [appenderArray=" + Arrays.toString(appenderArray) + "]"; } }
AppenderControlArraySet
java
apache__camel
components/camel-aws/camel-aws2-textract/src/main/java/org/apache/camel/component/aws2/textract/Textract2Configuration.java
{ "start": 1181, "end": 9723 }
// URI configuration bean for the AWS Textract (camel-aws2-textract) component:
// holds client/credential options, proxy settings, region, endpoint override
// and the S3 document-location triple (bucket/object/version), each annotated
// for Camel's URI-parameter binding. copy() relies on Object.clone for a
// shallow per-endpoint snapshot (all fields are immutable references, so
// shallow is sufficient).
// NOTE(review): generated-style code collapsed onto four physical lines; left
// byte-identical below.
class ____ implements Cloneable { @UriPath(description = "Logical name") @Metadata(required = true) private String label; @UriParam(label = "advanced") @Metadata(autowired = true) private TextractClient textractClient; @UriParam(label = "security", secret = true) private String accessKey; @UriParam(label = "security", secret = true) private String secretKey; @UriParam(label = "security", secret = true) private String sessionToken; @UriParam(defaultValue = "detectDocumentText") @Metadata(required = true) private Textract2Operations operation = Textract2Operations.detectDocumentText; @UriParam(label = "proxy", enums = "HTTP,HTTPS", defaultValue = "HTTPS") private Protocol proxyProtocol = Protocol.HTTPS; @UriParam(label = "proxy") private String proxyHost; @UriParam(label = "proxy") private Integer proxyPort; @UriParam(enums = "ap-south-2,ap-south-1,eu-south-1,eu-south-2,us-gov-east-1,me-central-1,il-central-1,ca-central-1,eu-central-1,us-iso-west-1,eu-central-2,eu-isoe-west-1,us-west-1,us-west-2,af-south-1,eu-north-1,eu-west-3,eu-west-2,eu-west-1,ap-northeast-3,ap-northeast-2,ap-northeast-1,me-south-1,sa-east-1,ap-east-1,cn-north-1,ca-west-1,us-gov-west-1,ap-southeast-1,ap-southeast-2,us-iso-east-1,ap-southeast-3,ap-southeast-4,us-east-1,us-east-2,cn-northwest-1,us-isob-east-1,aws-global,aws-cn-global,aws-us-gov-global,aws-iso-global,aws-iso-b-global") private String region; @UriParam private boolean pojoRequest; @UriParam(label = "security") private boolean trustAllCertificates; @UriParam private boolean overrideEndpoint; @UriParam private String uriEndpointOverride; @UriParam(label = "security") private boolean useDefaultCredentialsProvider; @UriParam(label = "security") private boolean useProfileCredentialsProvider; @UriParam(label = "security") private boolean useSessionCredentials; @UriParam(label = "security") private String profileCredentialsName; @UriParam private String s3Bucket; @UriParam private String s3Object; @UriParam private String s3ObjectVersion; 
public TextractClient getTextractClient() { return textractClient; } /** * To use an existing configured AWS Textract client */ public void setTextractClient(TextractClient textractClient) { this.textractClient = textractClient; } public String getAccessKey() { return accessKey; } /** * Amazon AWS Access Key */ public void setAccessKey(String accessKey) { this.accessKey = accessKey; } public String getSecretKey() { return secretKey; } /** * Amazon AWS Secret Key */ public void setSecretKey(String secretKey) { this.secretKey = secretKey; } public String getSessionToken() { return sessionToken; } /** * Amazon AWS Session Token */ public void setSessionToken(String sessionToken) { this.sessionToken = sessionToken; } public Textract2Operations getOperation() { return operation; } /** * The operation to perform */ public void setOperation(Textract2Operations operation) { this.operation = operation; } public Protocol getProxyProtocol() { return proxyProtocol; } /** * To define a proxy protocol when instantiating the Textract client */ public void setProxyProtocol(Protocol proxyProtocol) { this.proxyProtocol = proxyProtocol; } public String getProxyHost() { return proxyHost; } /** * To define a proxy host when instantiating the Textract client */ public void setProxyHost(String proxyHost) { this.proxyHost = proxyHost; } public Integer getProxyPort() { return proxyPort; } /** * To define a proxy port when instantiating the Textract client */ public void setProxyPort(Integer proxyPort) { this.proxyPort = proxyPort; } public String getRegion() { return region; } /** * The region in which Textract client needs to work. 
When using this parameter, the configuration will expect the * lowercase name of the region (for example ap-east-1) You'll need to use the name Region.EU_WEST_1.id() */ public void setRegion(String region) { this.region = region; } public boolean isPojoRequest() { return pojoRequest; } /** * If we want to use a POJO request as body or not */ public void setPojoRequest(boolean pojoRequest) { this.pojoRequest = pojoRequest; } public boolean isTrustAllCertificates() { return trustAllCertificates; } /** * If we want to trust all certificates in case of overriding the endpoint */ public void setTrustAllCertificates(boolean trustAllCertificates) { this.trustAllCertificates = trustAllCertificates; } public boolean isOverrideEndpoint() { return overrideEndpoint; } /** * Set the need for overriding the endpoint. This option needs to be used in combination with uriEndpointOverride * option */ public void setOverrideEndpoint(boolean overrideEndpoint) { this.overrideEndpoint = overrideEndpoint; } public String getUriEndpointOverride() { return uriEndpointOverride; } /** * Set the overriding uri endpoint. This option needs to be used in combination with overrideEndpoint option */ public void setUriEndpointOverride(String uriEndpointOverride) { this.uriEndpointOverride = uriEndpointOverride; } public boolean isUseDefaultCredentialsProvider() { return useDefaultCredentialsProvider; } /** * Set whether the Textract client should expect to load credentials through a default credentials provider or to * expect static credentials to be passed in. */ public void setUseDefaultCredentialsProvider(boolean useDefaultCredentialsProvider) { this.useDefaultCredentialsProvider = useDefaultCredentialsProvider; } public boolean isUseProfileCredentialsProvider() { return useProfileCredentialsProvider; } /** * Set whether the Textract client should expect to load credentials through a profile credentials provider. 
*/ public void setUseProfileCredentialsProvider(boolean useProfileCredentialsProvider) { this.useProfileCredentialsProvider = useProfileCredentialsProvider; } public boolean isUseSessionCredentials() { return useSessionCredentials; } /** * Set whether the Textract client should expect to use Session Credentials. This is useful in situation in which * the user needs to assume a IAM role for doing operations in Textract. */ public void setUseSessionCredentials(boolean useSessionCredentials) { this.useSessionCredentials = useSessionCredentials; } public String getProfileCredentialsName() { return profileCredentialsName; } /** * If using a profile credentials provider this parameter will set the profile name. */ public void setProfileCredentialsName(String profileCredentialsName) { this.profileCredentialsName = profileCredentialsName; } public String getLabel() { return label; } /** * A logical name to use. */ public void setLabel(String label) { this.label = label; } public String getS3Bucket() { return s3Bucket; } /** * The S3 bucket name for document location */ public void setS3Bucket(String s3Bucket) { this.s3Bucket = s3Bucket; } public String getS3Object() { return s3Object; } /** * The S3 object name for document location */ public void setS3Object(String s3Object) { this.s3Object = s3Object; } public String getS3ObjectVersion() { return s3ObjectVersion; } /** * The S3 object version for document location */ public void setS3ObjectVersion(String s3ObjectVersion) { this.s3ObjectVersion = s3ObjectVersion; } public Textract2Configuration copy() { try { return (Textract2Configuration) super.clone(); } catch (CloneNotSupportedException e) { throw new RuntimeCamelException(e); } } }
Textract2Configuration
java
alibaba__druid
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/insert/MySqlInsertTest_5.java
{ "start": 1049, "end": 2012 }
/**
 * Parser regression test: an INSERT whose single VALUES tuple carries more
 * expressions (9) than declared columns (3) must still parse, and must
 * pretty-print with the expected line wrapping.
 */
class ____ extends MysqlTest {

    public void test_0() throws Exception {
        String sql = "INSERT INTO tbl_name (a,b,c) VALUES(1,2,3,4,5,6,7,8,9);";

        MySqlStatementParser parser = new MySqlStatementParser(sql);
        List<SQLStatement> statementList = parser.parseStatementList();
        SQLStatement stmt = statementList.get(0);
        MySqlInsertStatement insertStmt = (MySqlInsertStatement) stmt;

        // Column/value counts are preserved exactly as written, even though
        // they disagree with each other.
        assertEquals(3, insertStmt.getColumns().size());
        assertEquals(1, insertStmt.getValuesList().size());
        assertEquals(9, insertStmt.getValuesList().get(0).getValues().size());
        assertEquals(1, statementList.size());

        MySqlSchemaStatVisitor visitor = new MySqlSchemaStatVisitor();
        stmt.accept(visitor);

        // The formatter normalizes spacing and wraps long value lists.
        assertEquals("INSERT INTO tbl_name (a, b, c)"
                + "\nVALUES (1, 2, 3, 4, 5"
                + "\n\t, 6, 7, 8, 9);",
                SQLUtils.toMySqlString(insertStmt));
    }
}
MySqlInsertTest_5
java
apache__commons-lang
src/test/java/org/apache/commons/lang3/concurrent/EventCountCircuitBreakerTest.java
{ "start": 1757, "end": 3471 }
// PropertyChangeListener test double: asserts every received "open"
// state-change event came from the expected source with genuinely different
// old/new values, and records each new value so tests can verify() the exact
// sequence of open/closed transitions.
// NOTE(review): the trailing javadoc and "private static final" at the end of
// this line belong to the next declaration, which is truncated by the span;
// everything is left byte-identical below.
class ____ implements PropertyChangeListener { /** The expected event source. */ private final Object expectedSource; /** A list with the updated values extracted from received change events. */ private final List<Boolean> changedValues; /** * Creates a new instance of {@code ChangeListener} and sets the expected event * source. * * @param source the expected event source */ ChangeListener(final Object source) { expectedSource = source; changedValues = new ArrayList<>(); } @Override public void propertyChange(final PropertyChangeEvent evt) { assertEquals(expectedSource, evt.getSource(), "Wrong event source"); assertEquals("open", evt.getPropertyName(), "Wrong property name"); final Boolean newValue = (Boolean) evt.getNewValue(); final Boolean oldValue = (Boolean) evt.getOldValue(); assertNotEquals(newValue, oldValue, "Old and new value are equal"); changedValues.add(newValue); } /** * Verifies that change events for the expected values have been received. * * @param values the expected values */ public void verify(final Boolean... values) { assertArrayEquals(values, changedValues.toArray(ArrayUtils.EMPTY_BOOLEAN_OBJECT_ARRAY)); } } /** * A test implementation of {@code EventCountCircuitBreaker} which supports mocking the timer. * This is useful for the creation of deterministic tests for switching the circuit * breaker's state. */ private static final
ChangeListener
java
elastic__elasticsearch
x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/analyze/ContendedRegisterAnalyzeAction.java
{ "start": 9037, "end": 12914 }
// Transport request describing one contended-register analysis run:
// repository name, blob container path, register name, number of register
// operations to issue and how many initial reads to perform. Both wire
// constructors/writeTo assert a transport version >= 8.8.0. The trailing
// static helpers convert between an 8-byte big-endian BytesReference and a
// long, using an empty reference to represent zero.
// NOTE(review): collapsed onto two physical lines as stored here (the Java
// text block in getDescription() is only valid once re-expanded); left
// byte-identical below.
class ____ extends LegacyActionRequest { private final String repositoryName; private final String containerPath; private final String registerName; private final int requestCount; private final int initialRead; Request(String repositoryName, String containerPath, String registerName, int requestCount, int initialRead) { this.repositoryName = repositoryName; this.containerPath = containerPath; this.registerName = registerName; this.requestCount = requestCount; this.initialRead = initialRead; } Request(StreamInput in) throws IOException { super(in); assert in.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0); repositoryName = in.readString(); containerPath = in.readString(); registerName = in.readString(); requestCount = in.readVInt(); initialRead = in.readVInt(); } @Override public void writeTo(StreamOutput out) throws IOException { assert out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0); super.writeTo(out); out.writeString(repositoryName); out.writeString(containerPath); out.writeString(registerName); out.writeVInt(requestCount); out.writeVInt(initialRead); } @Override public ActionRequestValidationException validate() { return null; } String getRepositoryName() { return repositoryName; } String getContainerPath() { return containerPath; } String getRegisterName() { return registerName; } int getRequestCount() { return requestCount; } int getInitialRead() { return initialRead; } @Override public Task createTask(long id, String type, String action, TaskId parentTaskId, Map<String, String> headers) { return new CancellableTask(id, type, action, getDescription(), parentTaskId, headers); } @Override public String toString() { return getDescription(); } @Override public String getDescription() { return Strings.format( """ ContendedRegisterAnalyzeAction.Request{\ repositoryName='%s', containerPath='%s', registerName='%s', requestCount='%d', initialRead='%d'}""", repositoryName, containerPath, registerName, requestCount, initialRead ); } } static 
long longFromBytes(BytesReference bytesReference) { if (bytesReference.length() == 0) { return 0L; } else if (bytesReference.length() == Long.BYTES) { try (var baos = new ByteArrayOutputStream(Long.BYTES)) { bytesReference.writeTo(baos); final var bytes = baos.toByteArray(); assert bytes.length == Long.BYTES; return ByteUtils.readLongBE(bytes, 0); } catch (IOException e) { assert false : "no IO takes place"; throw new IllegalStateException("unexpected conversion error", e); } } else { throw new IllegalArgumentException("cannot read long from BytesReference of length " + bytesReference.length()); } } static BytesReference bytesFromLong(long value) { if (value == 0L) { return BytesArray.EMPTY; } else { final var bytes = new byte[Long.BYTES]; ByteUtils.writeLongBE(value, bytes, 0); return new BytesArray(bytes); } } }
Request
java
quarkusio__quarkus
extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/tls/TlsConfigFromRegistryManualTest.java
{ "start": 1149, "end": 3094 }
class ____ { @RegisterExtension static final QuarkusUnitTest config = new QuarkusUnitTest() .withApplicationRoot((jar) -> jar .addClasses(Client.class, Resource.class) .addAsResource(new File("target/certs/tls-test-keystore.jks"), "keystore.jks") .addAsResource(new File("target/certs/tls-test-truststore.jks"), "truststore.jks")) .overrideConfigKey("quarkus.tls.key-store.jks.path", "keystore.jks") .overrideConfigKey("quarkus.tls.key-store.jks.password", "secret") .overrideConfigKey("quarkus.tls.rest-client.trust-store.jks.path", "truststore.jks") .overrideConfigKey("quarkus.tls.rest-client.trust-store.jks.password", "secret"); @TestHTTPResource(tls = true) URL url; @Inject TlsConfigurationRegistry registry; @Test void test() throws IOException { int count = 10; List<Closeable> closeables = new ArrayList<>(count); for (int i = 0; i < count; i++) { Optional<TlsConfiguration> maybeTlsConfiguration = TlsConfiguration.from(registry, Optional.of("rest-client")); assertThat(maybeTlsConfiguration).isPresent(); Client client = QuarkusRestClientBuilder.newBuilder().baseUrl(url).tlsConfiguration(maybeTlsConfiguration.get()) .build(Client.class); assertThat(client.echo("w0rld")).isEqualTo("hello, w0rld"); closeables.add((Closeable) client); } assertThat(RestClientRecorder.clientsUsingTlsConfig("rest-client")).hasSize(count); // manually close all clients for (Closeable closeable : closeables) { closeable.close(); } // assert that the TLS config was cleaned up assertThat(RestClientRecorder.clientsUsingTlsConfig("rest-client")).isEmpty(); } @Path("/hello") public
TlsConfigFromRegistryManualTest