language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
quarkusio__quarkus
|
extensions/panache/mongodb-panache-common/runtime/src/main/java/io/quarkus/mongodb/panache/common/transaction/MongoTransactionException.java
|
{
"start": 63,
"end": 208
}
|
class ____ extends RuntimeException {
public MongoTransactionException(Exception cause) {
super(cause);
}
}
|
MongoTransactionException
|
java
|
spring-projects__spring-security
|
web/src/test/java/org/springframework/security/web/server/FormPostServerRedirectStrategyTests.java
|
{
"start": 1242,
"end": 5423
}
|
class ____ {
private static final String POLICY_DIRECTIVE_PATTERN = "script-src 'nonce-(.+)'";
private final ServerRedirectStrategy redirectStrategy = new FormPostServerRedirectStrategy();
private final MockServerHttpRequest request = MockServerHttpRequest.get("https://localhost").build();
private final MockServerWebExchange webExchange = MockServerWebExchange.from(this.request);
@Test
public void redirectWhetLocationAbsoluteUriIsPresentThenRedirect() {
this.redirectStrategy.sendRedirect(this.webExchange, URI.create("https://example.com")).block();
MockServerHttpResponse response = this.webExchange.getResponse();
assertThat(response.getBodyAsString().block()).contains("action=\"https://example.com\"");
assertThat(this.webExchange.getResponse().getStatusCode()).isEqualTo(HttpStatus.OK);
assertThat(this.webExchange.getResponse().getHeaders().getContentType()).isEqualTo(MediaType.TEXT_HTML);
assertThat(this.webExchange.getResponse()).satisfies(hasScriptSrcNonce());
}
@Test
public void redirectWhetLocationRootRelativeUriIsPresentThenRedirect() {
this.redirectStrategy.sendRedirect(this.webExchange, URI.create("/test")).block();
MockServerHttpResponse response = this.webExchange.getResponse();
assertThat(response.getBodyAsString().block()).contains("action=\"/test\"");
assertThat(this.webExchange.getResponse().getStatusCode()).isEqualTo(HttpStatus.OK);
assertThat(this.webExchange.getResponse().getHeaders().getContentType()).isEqualTo(MediaType.TEXT_HTML);
assertThat(this.webExchange.getResponse()).satisfies(hasScriptSrcNonce());
}
@Test
public void redirectWhetLocationRelativeUriIsPresentThenRedirect() {
this.redirectStrategy.sendRedirect(this.webExchange, URI.create("test")).block();
MockServerHttpResponse response = this.webExchange.getResponse();
assertThat(response.getBodyAsString().block()).contains("action=\"test\"");
assertThat(this.webExchange.getResponse().getStatusCode()).isEqualTo(HttpStatus.OK);
assertThat(this.webExchange.getResponse().getHeaders().getContentType()).isEqualTo(MediaType.TEXT_HTML);
assertThat(this.webExchange.getResponse()).satisfies(hasScriptSrcNonce());
}
@Test
public void redirectWhenLocationAbsoluteUriWithFragmentIsPresentThenRedirect() {
this.redirectStrategy.sendRedirect(this.webExchange, URI.create("https://example.com/path#fragment")).block();
MockServerHttpResponse response = this.webExchange.getResponse();
assertThat(response.getBodyAsString().block()).contains("action=\"https://example.com/path#fragment\"");
assertThat(this.webExchange.getResponse().getStatusCode()).isEqualTo(HttpStatus.OK);
assertThat(this.webExchange.getResponse().getHeaders().getContentType()).isEqualTo(MediaType.TEXT_HTML);
assertThat(this.webExchange.getResponse()).satisfies(hasScriptSrcNonce());
}
@Test
public void redirectWhenLocationAbsoluteUriWithQueryParamsIsPresentThenRedirect() {
this.redirectStrategy
.sendRedirect(this.webExchange, URI.create("https://example.com/path?param1=one¶m2=two#fragment"))
.block();
MockServerHttpResponse response = this.webExchange.getResponse();
String content = response.getBodyAsString().block();
assertThat(this.webExchange.getResponse().getStatusCode()).isEqualTo(HttpStatus.OK);
assertThat(this.webExchange.getResponse().getHeaders().getContentType()).isEqualTo(MediaType.TEXT_HTML);
assertThat(content).contains("action=\"https://example.com/path#fragment\"");
assertThat(content).contains("<input name=\"param1\" type=\"hidden\" value=\"one\" />");
assertThat(content).contains("<input name=\"param2\" type=\"hidden\" value=\"two\" />");
}
private ThrowingConsumer<MockServerHttpResponse> hasScriptSrcNonce() {
return (response) -> {
final String policyDirective = response.getHeaders().getFirst("Content-Security-Policy");
assertThat(policyDirective).isNotEmpty();
assertThat(policyDirective).matches(POLICY_DIRECTIVE_PATTERN);
final String nonce = policyDirective.replaceFirst(POLICY_DIRECTIVE_PATTERN, "$1");
assertThat(response.getBodyAsString().block()).contains("<script nonce=\"%s\">".formatted(nonce));
};
}
}
|
FormPostServerRedirectStrategyTests
|
java
|
apache__flink
|
flink-end-to-end-tests/flink-batch-sql-test/src/test/java/org/apache/flink/sql/tests/GeneratorTableSource.java
|
{
"start": 1165,
"end": 1860
}
|
class ____ implements ScanTableSource {
private final Generator generator;
GeneratorTableSource(Generator generator) {
this.generator = generator;
}
@Override
public DynamicTableSource copy() {
return new GeneratorTableSource(generator);
}
@Override
public String asSummaryString() {
return "GeneratorTableSource";
}
@Override
public ChangelogMode getChangelogMode() {
return ChangelogMode.insertOnly();
}
@Override
public ScanRuntimeProvider getScanRuntimeProvider(ScanContext runtimeProviderContext) {
return SourceProvider.of(new FromElementsSource<>(generator), 1);
}
}
|
GeneratorTableSource
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/lazy/proxy/inlinedirtychecking/EntityWithMutableAttributesTest.java
|
{
"start": 4781,
"end": 5455
}
|
class ____ {
@Id
private Integer id;
@NotNull
private String name;
@Temporal(TemporalType.TIMESTAMP)
@Column(name = "t_date")
private Date date;
private String description;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Date getDate() {
return date;
}
public void setDate(Date date) {
this.date = date;
}
}
}
|
Role
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/discriminator/SingleTableInheritancePersistTest.java
|
{
"start": 4261,
"end": 5150
}
|
class ____ {
@Id
private String name;
@OneToMany(mappedBy = "familyName", cascade = CascadeType.ALL, orphanRemoval = true)
private List<Person> members = new ArrayList<>();
public Family() {
}
public Family(String name) {
this.name = name;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public List<Person> getMembers() {
return members;
}
public void setMembers(List<Person> members) {
this.members = members;
}
public void add(Person person) {
person.setFamilyName( this );
members.add( person );
}
@Override
public String toString() {
return "Family [name=" + name + "]";
}
}
@Entity(name = "Person")
@Inheritance(strategy = InheritanceType.SINGLE_TABLE)
@DiscriminatorColumn(name = "TYPE", discriminatorType = DiscriminatorType.STRING)
public static
|
Family
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/main/java/org/springframework/web/servlet/function/RequestPredicates.java
|
{
"start": 24497,
"end": 25149
}
|
class ____ implements RequestPredicate {
private final Predicate<ServerRequest.Headers> headersPredicate;
public HeadersPredicate(Predicate<ServerRequest.Headers> headersPredicate) {
Assert.notNull(headersPredicate, "Predicate must not be null");
this.headersPredicate = headersPredicate;
}
@Override
public boolean test(ServerRequest request) {
if (CorsUtils.isPreFlightRequest(request.servletRequest())) {
return true;
}
else {
return this.headersPredicate.test(request.headers());
}
}
@Override
public String toString() {
return this.headersPredicate.toString();
}
}
private static
|
HeadersPredicate
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/format/number/NumberFormatAnnotationFormatterFactory.java
|
{
"start": 1349,
"end": 2374
}
|
class ____ extends EmbeddedValueResolutionSupport
implements AnnotationFormatterFactory<NumberFormat> {
@Override
public Set<Class<?>> getFieldTypes() {
return NumberUtils.STANDARD_NUMBER_TYPES;
}
@Override
public Printer<Number> getPrinter(NumberFormat annotation, Class<?> fieldType) {
return configureFormatterFrom(annotation);
}
@Override
public Parser<Number> getParser(NumberFormat annotation, Class<?> fieldType) {
return configureFormatterFrom(annotation);
}
private Formatter<Number> configureFormatterFrom(NumberFormat annotation) {
String pattern = resolveEmbeddedValue(annotation.pattern());
if (StringUtils.hasLength(pattern)) {
return new NumberStyleFormatter(pattern);
}
else {
Style style = annotation.style();
if (style == Style.CURRENCY) {
return new CurrencyStyleFormatter();
}
else if (style == Style.PERCENT) {
return new PercentStyleFormatter();
}
else {
return new NumberStyleFormatter();
}
}
}
}
|
NumberFormatAnnotationFormatterFactory
|
java
|
apache__kafka
|
storage/src/test/java/org/apache/kafka/server/log/remote/metadata/storage/RemoteLogMetadataFormatterTest.java
|
{
"start": 1762,
"end": 4018
}
|
class ____ {
private static final Uuid TOPIC_ID = Uuid.randomUuid();
private static final String TOPIC = "foo";
private static final TopicIdPartition TP0 = new TopicIdPartition(TOPIC_ID, new TopicPartition(TOPIC, 0));
private static final Uuid SEGMENT_ID = Uuid.randomUuid();
@Test
public void testFormat() throws IOException {
Map<Integer, Long> segLeaderEpochs = new HashMap<>();
segLeaderEpochs.put(0, 0L);
segLeaderEpochs.put(1, 20L);
segLeaderEpochs.put(2, 80L);
RemoteLogSegmentId remoteLogSegmentId = new RemoteLogSegmentId(TP0, SEGMENT_ID);
Optional<CustomMetadata> customMetadata = Optional.of(new CustomMetadata(new byte[10]));
RemoteLogSegmentMetadata remoteLogMetadata = new RemoteLogSegmentMetadata(
remoteLogSegmentId, 0L, 100L, -1L, 1, 123L, 1024, customMetadata, COPY_SEGMENT_STARTED,
segLeaderEpochs, true);
byte[] metadataBytes = new RemoteLogMetadataSerde().serialize(remoteLogMetadata);
ConsumerRecord<byte[], byte[]> metadataRecord = new ConsumerRecord<>(
"__remote_log_metadata", 0, 0, null, metadataBytes);
String expected = String.format(
"partition: 0, offset: 0, value: " +
"RemoteLogSegmentMetadata{remoteLogSegmentId=RemoteLogSegmentId{topicIdPartition=%s:foo-0, id=%s}, " +
"startOffset=0, endOffset=100, brokerId=1, maxTimestampMs=-1, " +
"eventTimestampMs=123, segmentLeaderEpochs={0=0, 1=20, 2=80}, segmentSizeInBytes=1024, " +
"customMetadata=Optional[CustomMetadata{10 bytes}], " +
"state=COPY_SEGMENT_STARTED, txnIdxEmpty=true}\n",
TOPIC_ID, SEGMENT_ID);
try (ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(baos)) {
try (RemoteLogMetadataSerde.RemoteLogMetadataFormatter formatter =
new RemoteLogMetadataSerde.RemoteLogMetadataFormatter()) {
formatter.writeTo(metadataRecord, ps);
assertEquals(expected, baos.toString());
}
}
}
}
|
RemoteLogMetadataFormatterTest
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/internal/Boolean2DArraysBaseTest.java
|
{
"start": 888,
"end": 1113
}
|
class ____ testing <code>{@link Boolean2DArrays}</code>.
* <p>
* Is in <code>org.assertj.core.internal</code> package to be able to set {@link Boolean2DArrays#failures} appropriately.
*
* @author Maciej Wajcht
*/
public
|
for
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/StatementSwitchToExpressionSwitchTest.java
|
{
"start": 112406,
"end": 113672
}
|
class ____ {
int[] x;
public Test(int foo) {
x = null;
}
public int[] foo(Suit suit) {
x[6] <<=
switch (suit) {
case HEART -> throw new RuntimeException();
case DIAMOND -> (((x[6] + 1) * (x[6] * x[5]) << 1));
case SPADE -> throw new RuntimeException();
default -> throw new NullPointerException();
};
return x;
}
}
""")
.setArgs(
"-XepOpt:StatementSwitchToExpressionSwitch:EnableAssignmentSwitchConversion",
"-XepOpt:StatementSwitchToExpressionSwitch:EnableDirectConversion=false")
.setFixChooser(StatementSwitchToExpressionSwitchTest::assertOneFixAndChoose)
.doTest();
}
@Test
public void switchByEnum_assignmentSwitchToMultipleArray_noError() {
// Multiple array dereferences or other non-variable left-hand-suit expressions may (in
// principle) be convertible to assignment switches, but this feature is not supported at this
// time
helper
.addSourceLines(
"Test.java",
"""
|
Test
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/main/java/org/springframework/data/jpa/repository/config/InspectionClassLoader.java
|
{
"start": 879,
"end": 961
}
|
class ____ at a later time.
*
* @author Mark Paluch
* @since 2.1
*/
|
transformation
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/prefetch/TestS3ARemoteObject.java
|
{
"start": 1457,
"end": 3418
}
|
class ____ extends AbstractHadoopTestBase {
private final ExecutorService threadPool = Executors.newFixedThreadPool(1);
private final ExecutorServiceFuturePool futurePool =
new ExecutorServiceFuturePool(threadPool);
private final ObjectInputStreamCallbacks client =
MockS3ARemoteObject.createClient("bucket");
@Test
public void testArgChecks() throws Exception {
S3AReadOpContext readContext =
S3APrefetchFakes.createReadContext(futurePool, "key", 10);
S3ObjectAttributes attrs =
S3APrefetchFakes.createObjectAttributes("bucket", "key", 10);
S3AInputStreamStatistics stats =
readContext.getS3AStatisticsContext().newInputStreamStatistics();
ChangeTracker changeTracker =
S3APrefetchFakes.createChangeTracker("bucket", "key", 10);
// Should not throw.
new S3ARemoteObject(readContext, attrs, client, stats, changeTracker);
ExceptionAsserts.assertThrows(
IllegalArgumentException.class,
"'context' must not be null",
() -> new S3ARemoteObject(null, attrs, client, stats, changeTracker));
ExceptionAsserts.assertThrows(
IllegalArgumentException.class,
"'s3Attributes' must not be null",
() -> new S3ARemoteObject(readContext, null, client, stats,
changeTracker));
ExceptionAsserts.assertThrows(
IllegalArgumentException.class,
"'client' must not be null",
() -> new S3ARemoteObject(readContext, attrs, null, stats,
changeTracker));
ExceptionAsserts.assertThrows(
IllegalArgumentException.class,
"'streamStatistics' must not be null",
() -> new S3ARemoteObject(readContext, attrs, client, null,
changeTracker));
ExceptionAsserts.assertThrows(
IllegalArgumentException.class,
"'changeTracker' must not be null",
() -> new S3ARemoteObject(readContext, attrs, client, stats, null));
}
}
|
TestS3ARemoteObject
|
java
|
quarkusio__quarkus
|
extensions/keycloak-authorization/runtime/src/main/java/io/quarkus/keycloak/pep/runtime/KeycloakPolicyEnforcerConfig.java
|
{
"start": 454,
"end": 808
}
|
interface ____ {
/**
* The default tenant.
*/
@WithParentName
KeycloakPolicyEnforcerTenantConfig defaultTenant();
/**
* Additional named tenants.
*/
@ConfigDocSection
@ConfigDocMapKey("tenant")
@WithParentName
Map<String, KeycloakPolicyEnforcerTenantConfig> namedTenants();
}
|
KeycloakPolicyEnforcerConfig
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java
|
{
"start": 3891,
"end": 5727
}
|
class ____ implements Tool {
// Constants
private static final Logger LOG = LoggerFactory.getLogger(TestDFSIO.class);
private static final int DEFAULT_BUFFER_SIZE = 1000000;
private static final String BASE_FILE_NAME = "test_io_";
private static final String DEFAULT_RES_FILE_NAME = "TestDFSIO_results.log";
private static final long MEGA = ByteMultiple.MB.value();
private static final int DEFAULT_NR_BYTES = 128;
private static final int DEFAULT_NR_FILES = 4;
private static final String USAGE =
"Usage: " + TestDFSIO.class.getSimpleName() +
" [genericOptions]" +
" -read [-random | -backward | -skip [-skipSize Size]] |" +
" -write | -append | -truncate | -clean" +
" [-compression codecClassName]" +
" [-nrFiles N]" +
" [-size Size[B|KB|MB|GB|TB]]" +
" [-resFile resultFileName] [-bufferSize Bytes]" +
" [-storagePolicy storagePolicyName]" +
" [-erasureCodePolicy erasureCodePolicyName]";
private Configuration config;
private static final String STORAGE_POLICY_NAME_KEY =
"test.io.block.storage.policy";
private static final String ERASURE_CODE_POLICY_NAME_KEY =
"test.io.erasure.code.policy";
private ExecutorService excutorService = Executors.newFixedThreadPool(
2 * Runtime.getRuntime().availableProcessors());
private CompletionService<String> completionService =
new ExecutorCompletionService<>(excutorService);
static{
Configuration.addDefaultResource("hdfs-default.xml");
Configuration.addDefaultResource("hdfs-site.xml");
Configuration.addDefaultResource("mapred-default.xml");
Configuration.addDefaultResource("mapred-site.xml");
}
private
|
TestDFSIO
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusResponse.java
|
{
"start": 614,
"end": 1931
}
|
class ____ extends ActionResponse implements ToXContentObject {
private final boolean eligibleToStartTrial;
GetTrialStatusResponse(StreamInput in) throws IOException {
eligibleToStartTrial = in.readBoolean();
}
public GetTrialStatusResponse(boolean eligibleToStartTrial) {
this.eligibleToStartTrial = eligibleToStartTrial;
}
boolean isEligibleToStartTrial() {
return eligibleToStartTrial;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeBoolean(eligibleToStartTrial);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
GetTrialStatusResponse that = (GetTrialStatusResponse) o;
return eligibleToStartTrial == that.eligibleToStartTrial;
}
@Override
public int hashCode() {
return Objects.hash(eligibleToStartTrial);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field("eligible_to_start_trial", eligibleToStartTrial);
builder.endObject();
return builder;
}
}
|
GetTrialStatusResponse
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/main/java/org/hibernate/envers/strategy/spi/AuditStrategyContext.java
|
{
"start": 423,
"end": 672
}
|
class ____ of the revision entity
*/
Class<?> getRevisionInfoClass();
/**
* Get the revision info timestamp accessor
* @return the getter for the timestamp attribute on the revision entity
*/
Getter getRevisionInfoTimestampAccessor();
}
|
name
|
java
|
quarkusio__quarkus
|
core/runtime/src/main/java/io/quarkus/logging/Log.java
|
{
"start": 86660,
"end": 91662
}
|
class ____
* @param level the level
* @param message the message
* @param params the message parameters
* @param t the throwable
*/
public static void log(String loggerFqcn, Logger.Level level, Object message, Object[] params, Throwable t) {
if (shouldFail) {
throw fail();
}
Logger.getLogger(stackWalker.getCallerClass()).log(loggerFqcn, level, message, params, t);
}
/**
* Issue a log message at the given log level using {@link java.text.MessageFormat}-style formatting.
*
* @param level the level
* @param format the message format string
* @param params the parameters
*/
public static void logv(Logger.Level level, String format, Object... params) {
if (shouldFail) {
throw fail();
}
Logger.getLogger(stackWalker.getCallerClass()).logv(level, format, params);
}
/**
* Issue a log message at the given log level using {@link java.text.MessageFormat}-style formatting.
*
* @param level the level
* @param format the message format string
* @param param1 the sole parameter
*/
public static void logv(Logger.Level level, String format, Object param1) {
if (shouldFail) {
throw fail();
}
Logger.getLogger(stackWalker.getCallerClass()).logv(level, format, param1);
}
/**
* Issue a log message at the given log level using {@link java.text.MessageFormat}-style formatting.
*
* @param level the level
* @param format the message format string
* @param param1 the first parameter
* @param param2 the second parameter
*/
public static void logv(Logger.Level level, String format, Object param1, Object param2) {
if (shouldFail) {
throw fail();
}
Logger.getLogger(stackWalker.getCallerClass()).logv(level, format, param1, param2);
}
/**
* Issue a log message at the given log level using {@link java.text.MessageFormat}-style formatting.
*
* @param level the level
* @param format the message format string
* @param param1 the first parameter
* @param param2 the second parameter
* @param param3 the third parameter
*/
public static void logv(Logger.Level level, String format, Object param1, Object param2, Object param3) {
if (shouldFail) {
throw fail();
}
Logger.getLogger(stackWalker.getCallerClass()).logv(level, format, param1, param2, param3);
}
/**
* Issue a log message at the given log level using {@link java.text.MessageFormat}-style formatting.
*
* @param level the level
* @param t the throwable
* @param format the message format string
* @param params the parameters
*/
public static void logv(Logger.Level level, Throwable t, String format, Object... params) {
if (shouldFail) {
throw fail();
}
Logger.getLogger(stackWalker.getCallerClass()).logv(level, t, format, params);
}
/**
* Issue a log message at the given log level using {@link java.text.MessageFormat}-style formatting.
*
* @param level the level
* @param t the throwable
* @param format the message format string
* @param param1 the sole parameter
*/
public static void logv(Logger.Level level, Throwable t, String format, Object param1) {
if (shouldFail) {
throw fail();
}
Logger.getLogger(stackWalker.getCallerClass()).logv(level, t, format, param1);
}
/**
* Issue a log message at the given log level using {@link java.text.MessageFormat}-style formatting.
*
* @param level the level
* @param t the throwable
* @param format the message format string
* @param param1 the first parameter
* @param param2 the second parameter
*/
public static void logv(Logger.Level level, Throwable t, String format, Object param1, Object param2) {
if (shouldFail) {
throw fail();
}
Logger.getLogger(stackWalker.getCallerClass()).logv(level, t, format, param1, param2);
}
/**
* Issue a log message at the given log level using {@link java.text.MessageFormat}-style formatting.
*
* @param level the level
* @param t the throwable
* @param format the message format string
* @param param1 the first parameter
* @param param2 the second parameter
* @param param3 the third parameter
*/
public static void logv(Logger.Level level, Throwable t, String format, Object param1, Object param2, Object param3) {
if (shouldFail) {
throw fail();
}
Logger.getLogger(stackWalker.getCallerClass()).logv(level, t, format, param1, param2, param3);
}
/**
* Issue a log message at the given log level using {@link java.text.MessageFormat}-style formatting.
*
* @param loggerFqcn the logger
|
name
|
java
|
apache__camel
|
components/camel-opentelemetry/src/test/java/org/apache/camel/opentelemetry/OpenTelemetryTracingStrategyPropagateContextTest.java
|
{
"start": 1681,
"end": 4905
}
|
class ____ extends CamelOpenTelemetryTestSupport {
@TempDir
private static Path tempDirectory;
private static final SpanTestData[] testdata = {
new SpanTestData().setLabel("camel-process").setOperation("delayed")
.setParentId(2),
new SpanTestData().setLabel("camel-process").setOperation("WithSpan.secondMethod")
.setParentId(2),
new SpanTestData().setLabel("camel-process").setOperation("file").setKind(SpanKind.SERVER)
};
OpenTelemetryTracingStrategyPropagateContextTest() {
super(testdata);
}
@BeforeAll
public static void createFile() throws IOException {
Files.createFile(tempDirectory.resolve("file.txt"));
}
@Test
void testTracingOfProcessors() {
NotifyBuilder notify = new NotifyBuilder(context).whenDone(1).create();
assertTrue(notify.matches(30, TimeUnit.SECONDS));
verify(true);
}
@Override
protected String getExcludePatterns() {
return "longRunningProcess";
}
@Override
protected RoutesBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from(fileUri(tempDirectory)).routeId("serviceA")
.process(exchange -> {
longRunningProcess();
}).id("longRunningProcess")
.delay(simple("${random(0,500)}")).id("delayed");
}
private void longRunningProcess() {
firstMethod();
secondMethod();
}
private void firstMethod() {
// no Span created by Camel
}
// Simulate io.opentelemetry.instrumentation.annotations.@WithSpan
// in order to avoid having to start an HTTP sever just to collect the Spans
// see https://github.com/open-telemetry/opentelemetry-java-examples/tree/main/telemetry-testing
//@WithSpan
public void secondMethod() {
// The Context should be propagated
Assertions.assertNotSame(Context.root(), Context.current(), "OpenTelemetry was not propagated !");
// build and start a custom Span similar to what @WithSpan would do
SpanBuilder builder = getOtTracer().getTracer().spanBuilder("WithSpan.secondMethod");
Span span = builder.setParent(Context.current())
.setAttribute(COMPONENT_KEY, "custom")
.startSpan();
//noinspection EmptyTryBlock
try (Scope ignored = span.makeCurrent()) {
// do work
} finally {
span.end();
}
}
};
}
@Override
protected Function<OpenTelemetryTracer, InterceptStrategy> getTracingStrategy() {
return (tracer) -> {
OpenTelemetryTracingStrategy strategy = new OpenTelemetryTracingStrategy(tracer);
strategy.setPropagateContext(true);
return strategy;
};
}
}
|
OpenTelemetryTracingStrategyPropagateContextTest
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/ImportAwareTests.java
|
{
"start": 6613,
"end": 6702
}
|
class ____ {
}
@Configuration
@EnableImportedConfig(foo = "xyz")
static
|
ImportingConfig
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/metrics/groups/InternalSinkWriterMetricGroup.java
|
{
"start": 1422,
"end": 3581
}
|
class ____ extends ProxyMetricGroup<MetricGroup>
implements SinkWriterMetricGroup {
private final Counter numRecordsOutErrors;
private final Counter numRecordsSendErrors;
private final Counter numRecordsWritten;
private final Counter numBytesWritten;
private final OperatorIOMetricGroup operatorIOMetricGroup;
@VisibleForTesting
InternalSinkWriterMetricGroup(
MetricGroup parentMetricGroup, OperatorIOMetricGroup operatorIOMetricGroup) {
super(parentMetricGroup);
numRecordsOutErrors = parentMetricGroup.counter(MetricNames.NUM_RECORDS_OUT_ERRORS);
numRecordsSendErrors =
parentMetricGroup.counter(MetricNames.NUM_RECORDS_SEND_ERRORS, numRecordsOutErrors);
numRecordsWritten =
parentMetricGroup.counter(
MetricNames.NUM_RECORDS_SEND,
operatorIOMetricGroup.getNumRecordsOutCounter());
numBytesWritten =
parentMetricGroup.counter(
MetricNames.NUM_BYTES_SEND, operatorIOMetricGroup.getNumBytesOutCounter());
this.operatorIOMetricGroup = operatorIOMetricGroup;
}
public static InternalSinkWriterMetricGroup wrap(OperatorMetricGroup operatorMetricGroup) {
return new InternalSinkWriterMetricGroup(
operatorMetricGroup, operatorMetricGroup.getIOMetricGroup());
}
@Override
public OperatorIOMetricGroup getIOMetricGroup() {
return operatorIOMetricGroup;
}
@Override
public Counter getNumRecordsOutErrorsCounter() {
return numRecordsOutErrors;
}
@Override
public Counter getNumRecordsSendErrorsCounter() {
return numRecordsSendErrors;
}
@Override
public Counter getNumRecordsSendCounter() {
return numRecordsWritten;
}
@Override
public Counter getNumBytesSendCounter() {
return numBytesWritten;
}
@Override
public void setCurrentSendTimeGauge(Gauge<Long> currentSendTimeGauge) {
parentMetricGroup.gauge(MetricNames.CURRENT_SEND_TIME, currentSendTimeGauge);
}
}
|
InternalSinkWriterMetricGroup
|
java
|
grpc__grpc-java
|
api/src/test/java/io/grpc/ServiceProvidersTestAbstractProvider.java
|
{
"start": 733,
"end": 823
}
|
class ____ that has a '$' in it, which causes
// issues with our build pipeline.
abstract
|
name
|
java
|
quarkusio__quarkus
|
integration-tests/maven/src/test/java/io/quarkus/maven/it/DevMojoIT.java
|
{
"start": 47677,
"end": 48344
}
|
class ____ {\n" +
" public static String message() {\n" +
" return \"to be deleted\";\n" +
" }\n" +
"}";
FileUtils.write(source, classDeletionResource, StandardCharsets.UTF_8);
runAndCheck();
// Wait until source file is compiled
await()
.pollDelay(1, TimeUnit.SECONDS)
.atMost(TestUtils.getDefaultTimeout(), TimeUnit.MINUTES)
.until(() -> devModeClient.getHttpResponse("/app/deletion").contains("to be deleted"));
// Remove InnerClass
filter(source, Collections.singletonMap("public static
|
Hello
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java24/org/springframework/core/type/classreading/ClassFileMetadataReader.java
|
{
"start": 1143,
"end": 2009
}
|
class ____ implements MetadataReader {
private final Resource resource;
private final AnnotationMetadata annotationMetadata;
ClassFileMetadataReader(Resource resource, @Nullable ClassLoader classLoader) throws IOException {
this.resource = resource;
this.annotationMetadata = ClassFileClassMetadata.of(parseClassModel(resource), classLoader);
}
private static ClassModel parseClassModel(Resource resource) throws IOException {
try (InputStream is = resource.getInputStream()) {
byte[] bytes = is.readAllBytes();
return ClassFile.of().parse(bytes);
}
}
@Override
public Resource getResource() {
return this.resource;
}
@Override
public ClassMetadata getClassMetadata() {
return this.annotationMetadata;
}
@Override
public AnnotationMetadata getAnnotationMetadata() {
return this.annotationMetadata;
}
}
|
ClassFileMetadataReader
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/TimeZoneFieldTest.java
|
{
"start": 274,
"end": 1212
}
|
class ____ extends TestCase {
public void test_codec() throws Exception {
User user = new User();
user.setValue(TimeZone.getDefault());
SerializeConfig mapping = new SerializeConfig();
mapping.setAsmEnable(false);
String text = JSON.toJSONString(user, mapping, SerializerFeature.WriteMapNullValue);
User user1 = JSON.parseObject(text, User.class);
Assert.assertEquals(user1.getValue(), user.getValue());
}
public void test_codec_null() throws Exception {
User user = new User();
user.setValue(null);
SerializeConfig mapping = new SerializeConfig();
mapping.setAsmEnable(false);
String text = JSON.toJSONString(user, mapping, SerializerFeature.WriteMapNullValue);
User user1 = JSON.parseObject(text, User.class);
Assert.assertEquals(user1.getValue(), user.getValue());
}
public static
|
TimeZoneFieldTest
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/util/introspection/PropertyOrFieldSupport_getValueOf_Test.java
|
{
"start": 8738,
"end": 8867
}
|
class ____ extends Employee {
public static String city() {
return "London";
}
}
static
|
StaticBarePropertyEmployee
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java
|
{
"start": 26497,
"end": 32152
}
|
class ____ hits/misses, but timing is in DLS cache, which is why we have `2L` here,
// because DLS cache is only hit once
expectedStats.put("misses_time_in_millis", 2L);
assertThat(cache.usageStats(), equalTo(expectedStats));
});
final Map<String, Object> finalStats = emptyStatsSupplier.get();
finalStats.put("hits", 1L);
finalStats.put("misses", 2L);
finalStats.put("evictions", 2L);
finalStats.put("hits_time_in_millis", 1L);
finalStats.put("misses_time_in_millis", 2L);
assertThat(cache.usageStats(), equalTo(finalStats));
}
public void testUsageStatsAreOrdered() {
final Map<String, Object> stats = newCache(Settings.EMPTY).usageStats();
assertThat("needs to be LinkedHashMap for order in transport", stats, instanceOf(LinkedHashMap.class));
}
private void runTestOnIndex(CheckedBiConsumer<SearchExecutionContext, LeafReaderContext, Exception> body) throws Exception {
runTestOnIndices(1, ctx -> {
final TestIndexContext indexContext = ctx.get(0);
body.accept(indexContext.searchExecutionContext, indexContext.leafReaderContext);
});
}
private record TestIndexContext(
Directory directory,
IndexWriter indexWriter,
DirectoryReader directoryReader,
SearchExecutionContext searchExecutionContext,
LeafReaderContext leafReaderContext
) implements Closeable {
@Override
public void close() throws IOException {
directoryReader.close();
indexWriter.close();
directory.close();
}
}
private TestIndexContext testIndex(MappingLookup mappingLookup, Client client) throws IOException {
TestIndexContext context = null;
final long nowInMillis = randomNonNegativeLong();
final ShardId shardId = new ShardId("idx_" + randomAlphaOfLengthBetween(2, 8), randomAlphaOfLength(12), 0);
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(shardId.getIndex(), Settings.EMPTY);
final IndexWriterConfig writerConfig = new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(NoMergePolicy.INSTANCE);
Directory directory = null;
IndexWriter iw = null;
DirectoryReader directoryReader = null;
try {
directory = newDirectory();
iw = new IndexWriter(directory, writerConfig);
for (int i = 1; i <= 100; i++) {
Document document = new Document();
for (int j = 1; j <= FIELD_COUNT; j++) {
document.add(new StringField("field-" + j, "value-" + i, Field.Store.NO));
}
iw.addDocument(document);
}
iw.commit();
directoryReader = DirectoryReader.open(directory);
final LeafReaderContext leaf = directoryReader.leaves().get(0);
final SearchExecutionContext searchExecutionContext = new SearchExecutionContext(
shardId.id(),
0,
indexSettings,
null,
null,
null,
mappingLookup,
null,
null,
parserConfig(),
writableRegistry(),
client,
newSearcher(directoryReader),
() -> nowInMillis,
null,
null,
() -> true,
null,
Map.of(),
MapperMetrics.NOOP
);
context = new TestIndexContext(directory, iw, directoryReader, searchExecutionContext, leaf);
return context;
} finally {
if (context == null) {
if (directoryReader != null) {
directoryReader.close();
}
if (iw != null) {
iw.close();
}
if (directory != null) {
directory.close();
}
}
}
}
private void runTestOnIndices(int numberIndices, CheckedConsumer<List<TestIndexContext>, Exception> body) throws Exception {
List<FieldMapper> types = new ArrayList<>();
for (int i = 0; i < 11; i++) { // the tests use fields 1 to 10.
// This field has a value.
types.add(new MockFieldMapper(new KeywordFieldMapper.KeywordFieldType("field-" + i)));
// This field never has a value
types.add(new MockFieldMapper(new KeywordFieldMapper.KeywordFieldType("dne-" + i)));
}
MappingLookup mappingLookup = MappingLookup.fromMappers(Mapping.EMPTY, types, List.of());
final Client client = mock(Client.class);
when(client.settings()).thenReturn(Settings.EMPTY);
final List<TestIndexContext> context = new ArrayList<>(numberIndices);
try {
for (int i = 0; i < numberIndices; i++) {
context.add(testIndex(mappingLookup, client));
}
body.accept(context);
} finally {
for (TestIndexContext indexContext : context) {
indexContext.close();
}
}
}
private DocumentSubsetBitsetCache newCache(Settings settings) {
final AtomicLong increasingMillisTime = new AtomicLong();
final LongSupplier relativeNanoTimeProvider = () -> TimeUnit.MILLISECONDS.toNanos(increasingMillisTime.getAndIncrement());
return new DocumentSubsetBitsetCache(settings, relativeNanoTimeProvider);
}
}
|
tracks
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/RequestScopedFieldInjectionTest.java
|
{
"start": 1265,
"end": 1547
}
|
class ____ {
@RestPath
private String field;
@GET
@Path("/{parameter}")
@Produces(MediaType.TEXT_PLAIN)
public String hello(String parameter) {
return "field:" + field + "-parameter:" + parameter;
}
}
}
|
Resource
|
java
|
apache__kafka
|
clients/src/test/java/org/apache/kafka/common/security/authenticator/SaslAuthenticatorTest.java
|
{
"start": 60598,
"end": 61855
}
|
class ____.
*/
@Test
public void testClientLoginCallbackOverride() throws Exception {
SecurityProtocol securityProtocol = SecurityProtocol.SASL_PLAINTEXT;
TestJaasConfig jaasConfig = configureMechanisms("PLAIN", Collections.singletonList("PLAIN"));
jaasConfig.createOrUpdateEntry(TestJaasConfig.LOGIN_CONTEXT_CLIENT, TestPlainLoginModule.class.getName(),
Collections.emptyMap());
server = createEchoServer(securityProtocol);
// Connection should succeed using login callback override that sets correct username/password
saslClientConfigs.put(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, TestLoginCallbackHandler.class.getName());
createAndCheckClientConnection(securityProtocol, "1");
// Connection should fail without login callback override since username/password in jaas config is invalid
saslClientConfigs.remove(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS);
try {
createClientConnection(securityProtocol, "invalid");
} catch (Exception e) {
assertInstanceOf(LoginException.class, e.getCause(), "Unexpected exception " + e.getCause());
}
}
/**
* Tests SASL server login callback
|
override
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoRequest.java
|
{
"start": 739,
"end": 2666
}
|
enum ____ {
BUILD,
LICENSE,
FEATURES;
public static EnumSet<Category> toSet(String... categories) {
EnumSet<Category> set = EnumSet.noneOf(Category.class);
for (String category : categories) {
switch (category) {
case "_all":
return EnumSet.allOf(Category.class);
case "_none":
return EnumSet.noneOf(Category.class);
default:
set.add(Category.valueOf(category.toUpperCase(Locale.ROOT)));
}
}
return set;
}
}
private boolean verbose;
private EnumSet<Category> categories = EnumSet.noneOf(Category.class);
public XPackInfoRequest() {}
public XPackInfoRequest(StreamInput in) throws IOException {
super(in);
this.verbose = in.readBoolean();
EnumSet<Category> categories = EnumSet.noneOf(Category.class);
int size = in.readVInt();
for (int i = 0; i < size; i++) {
categories.add(Category.valueOf(in.readString()));
}
this.categories = categories;
}
public void setVerbose(boolean verbose) {
this.verbose = verbose;
}
public boolean isVerbose() {
return verbose;
}
public void setCategories(EnumSet<Category> categories) {
this.categories = categories;
}
public EnumSet<Category> getCategories() {
return categories;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeBoolean(verbose);
out.writeVInt(categories.size());
for (Category category : categories) {
out.writeString(category.name());
}
}
}
|
Category
|
java
|
spring-projects__spring-framework
|
spring-web/src/test/java/org/springframework/web/filter/ForwardedHeaderFilterTests.java
|
{
"start": 10521,
"end": 17284
}
|
class ____ {
@Test
void contextPathEmpty() throws Exception {
request.addHeader(X_FORWARDED_PREFIX, "");
assertThat(filterAndGetContextPath()).isEmpty();
}
@Test
void contextPathWithTrailingSlash() throws Exception {
request.addHeader(X_FORWARDED_PREFIX, "/foo/bar/");
assertThat(filterAndGetContextPath()).isEqualTo("/foo/bar");
}
@Test
void contextPathWithTrailingSlashes() throws Exception {
request.addHeader(X_FORWARDED_PREFIX, "/foo/bar/baz///");
assertThat(filterAndGetContextPath()).isEqualTo("/foo/bar/baz");
}
@Test
void contextPathWithForwardedPrefix() throws Exception {
request.addHeader(X_FORWARDED_PREFIX, "/prefix");
request.setContextPath("/mvc-showcase");
String actual = filterAndGetContextPath();
assertThat(actual).isEqualTo("/prefix");
}
@Test
void contextPathWithForwardedPrefixTrailingSlash() throws Exception {
request.addHeader(X_FORWARDED_PREFIX, "/prefix/");
request.setContextPath("/mvc-showcase");
String actual = filterAndGetContextPath();
assertThat(actual).isEqualTo("/prefix");
}
private String filterAndGetContextPath() throws ServletException, IOException {
return filterAndGetWrappedRequest().getContextPath();
}
@Test
void contextPathPreserveEncoding() throws Exception {
request.setContextPath("/app%20");
request.setRequestURI("/app%20/path/");
HttpServletRequest actual = filterAndGetWrappedRequest();
assertThat(actual.getContextPath()).isEqualTo("/app%20");
assertThat(actual.getRequestURI()).isEqualTo("/app%20/path/");
assertThat(actual.getRequestURL().toString()).isEqualTo("http://localhost/app%20/path/");
}
@Test
void requestUri() throws Exception {
request.addHeader(X_FORWARDED_PREFIX, "/");
request.setContextPath("/app");
request.setRequestURI("/app/path");
HttpServletRequest actual = filterAndGetWrappedRequest();
assertThat(actual.getContextPath()).isEmpty();
assertThat(actual.getRequestURI()).isEqualTo("/path");
}
@Test
void requestUriWithTrailingSlash() throws Exception {
request.addHeader(X_FORWARDED_PREFIX, "/");
request.setContextPath("/app");
request.setRequestURI("/app/path/");
HttpServletRequest actual = filterAndGetWrappedRequest();
assertThat(actual.getContextPath()).isEmpty();
assertThat(actual.getRequestURI()).isEqualTo("/path/");
}
@Test
void requestUriPreserveEncoding() throws Exception {
request.setContextPath("/app");
request.setRequestURI("/app/path%20with%20spaces/");
HttpServletRequest actual = filterAndGetWrappedRequest();
assertThat(actual.getContextPath()).isEqualTo("/app");
assertThat(actual.getRequestURI()).isEqualTo("/app/path%20with%20spaces/");
assertThat(actual.getRequestURL().toString()).isEqualTo("http://localhost/app/path%20with%20spaces/");
}
@Test
void requestUriEqualsContextPath() throws Exception {
request.addHeader(X_FORWARDED_PREFIX, "/");
request.setContextPath("/app");
request.setRequestURI("/app");
HttpServletRequest actual = filterAndGetWrappedRequest();
assertThat(actual.getContextPath()).isEmpty();
assertThat(actual.getRequestURI()).isEqualTo("/");
}
@Test
void requestUriRootUrl() throws Exception {
request.addHeader(X_FORWARDED_PREFIX, "/");
request.setContextPath("/app");
request.setRequestURI("/app/");
HttpServletRequest actual = filterAndGetWrappedRequest();
assertThat(actual.getContextPath()).isEmpty();
assertThat(actual.getRequestURI()).isEqualTo("/");
}
@Test
void requestUriPreserveSemicolonContent() throws Exception {
request.setContextPath("");
request.setRequestURI("/path;a=b/with/semicolon");
HttpServletRequest actual = filterAndGetWrappedRequest();
assertThat(actual.getContextPath()).isEmpty();
assertThat(actual.getRequestURI()).isEqualTo("/path;a=b/with/semicolon");
assertThat(actual.getRequestURL().toString()).isEqualTo("http://localhost/path;a=b/with/semicolon");
}
@Test
void caseInsensitiveForwardedPrefix() throws Exception {
request = new MockHttpServletRequest() {
@Override // SPR-14372: make it case-sensitive
public String getHeader(String header) {
Enumeration<String> names = getHeaderNames();
while (names.hasMoreElements()) {
String name = names.nextElement();
if (name.equals(header)) {
return super.getHeader(header);
}
}
return null;
}
};
request.addHeader(X_FORWARDED_PREFIX, "/prefix");
request.setRequestURI("/path");
HttpServletRequest actual = filterAndGetWrappedRequest();
assertThat(actual.getRequestURI()).isEqualTo("/prefix/path");
}
@Test
void requestUriWithForwardedPrefix() throws Exception {
request.addHeader(X_FORWARDED_PREFIX, "/prefix");
request.setRequestURI("/mvc-showcase");
HttpServletRequest actual = filterAndGetWrappedRequest();
assertThat(actual.getRequestURL().toString()).isEqualTo("http://localhost/prefix/mvc-showcase");
}
@Test
void requestUriWithForwardedPrefixTrailingSlash() throws Exception {
request.addHeader(X_FORWARDED_PREFIX, "/prefix/");
request.setRequestURI("/mvc-showcase");
HttpServletRequest actual = filterAndGetWrappedRequest();
assertThat(actual.getRequestURL().toString()).isEqualTo("http://localhost/prefix/mvc-showcase");
}
@Test
void shouldConcatenatePrefixes() throws Exception {
request.addHeader(X_FORWARDED_PREFIX, "/first,/second");
request.setRequestURI("/mvc-showcase");
HttpServletRequest actual = filterAndGetWrappedRequest();
assertThat(actual.getRequestURL().toString()).isEqualTo("http://localhost/first/second/mvc-showcase");
}
@Test
void shouldConcatenatePrefixesWithTrailingSlashes() throws Exception {
request.addHeader(X_FORWARDED_PREFIX, "/first/,/second//");
request.setRequestURI("/mvc-showcase");
HttpServletRequest actual = filterAndGetWrappedRequest();
assertThat(actual.getRequestURL().toString()).isEqualTo("http://localhost/first/second/mvc-showcase");
}
@Test
void shouldRemoveSingleTrailingSlash() throws Exception {
request.addHeader(X_FORWARDED_PREFIX, "/prefix,/");
request.setRequestURI("/mvc-showcase");
HttpServletRequest actual = filterAndGetWrappedRequest();
assertThat(actual.getRequestURL().toString()).isEqualTo("http://localhost/prefix/mvc-showcase");
}
@Test
void requestURLNewStringBuffer() throws Exception {
request.addHeader(X_FORWARDED_PREFIX, "/prefix/");
request.setRequestURI("/mvc-showcase");
HttpServletRequest actual = filterAndGetWrappedRequest();
actual.getRequestURL().append("?key=value");
assertThat(actual.getRequestURL().toString()).isEqualTo("http://localhost/prefix/mvc-showcase");
}
}
@Nested
|
ForwardedPrefix
|
java
|
apache__dubbo
|
dubbo-registry/dubbo-registry-api/src/main/java/org/apache/dubbo/registry/client/ServiceDiscoveryRegistry.java
|
{
"start": 10897,
"end": 17895
}
|
interface ____ to, subscription url: " + url);
// }
return;
}
} finally {
mappingLock.unlock();
}
}
subscribeURLs(url, listener, mappingByUrl);
}
@Override
public final void unsubscribe(URL url, NotifyListener listener) {
if (!shouldSubscribe(url)) { // Should Not Subscribe
return;
}
url = addRegistryClusterKey(url);
doUnsubscribe(url, listener);
}
private URL addRegistryClusterKey(URL url) {
String registryCluster = serviceDiscovery.getUrl().getParameter(REGISTRY_CLUSTER_KEY);
if (registryCluster != null && url.getParameter(REGISTRY_CLUSTER_KEY) == null) {
url = url.addParameter(REGISTRY_CLUSTER_KEY, registryCluster);
}
return url;
}
@Override
public void doUnsubscribe(URL url, NotifyListener listener) {
// TODO: remove service name mapping listener
serviceDiscovery.unsubscribe(url, listener);
String protocolServiceKey = url.getProtocolServiceKey();
Set<String> serviceNames = serviceNameMapping.getMapping(url);
synchronized (mappingListeners) {
Set<MappingListener> keyedListeners = mappingListeners.get(protocolServiceKey);
if (keyedListeners != null) {
List<MappingListener> matched = keyedListeners.stream()
.filter(mappingListener -> mappingListener instanceof DefaultMappingListener
&& (Objects.equals(((DefaultMappingListener) mappingListener).getListener(), listener)))
.collect(Collectors.toList());
for (MappingListener mappingListener : matched) {
serviceNameMapping.stopListen(url, mappingListener);
keyedListeners.remove(mappingListener);
}
if (keyedListeners.isEmpty()) {
mappingListeners.remove(protocolServiceKey, Collections.emptySet());
}
}
}
if (CollectionUtils.isNotEmpty(serviceNames)) {
String serviceNamesKey = toStringKeys(serviceNames);
Lock appSubscriptionLock = getAppSubscription(serviceNamesKey);
try {
appSubscriptionLock.lock();
ServiceInstancesChangedListener instancesChangedListener = serviceListeners.get(serviceNamesKey);
if (instancesChangedListener != null) {
instancesChangedListener.removeListener(url.getServiceKey(), listener);
if (!instancesChangedListener.hasListeners()) {
instancesChangedListener.destroy();
serviceListeners.remove(serviceNamesKey);
removeAppSubscriptionLock(serviceNamesKey);
}
}
} finally {
appSubscriptionLock.unlock();
}
}
}
@Override
public List<URL> lookup(URL url) {
throw new UnsupportedOperationException("");
}
@Override
public boolean isAvailable() {
// serviceDiscovery isAvailable has a default method, which can be used as a reference when implementing
return serviceDiscovery.isAvailable();
}
@Override
public void destroy() {
registryManager.removeDestroyedRegistry(this);
// stop ServiceDiscovery
execute(serviceDiscovery::destroy);
// destroy all event listener
for (ServiceInstancesChangedListener listener : serviceListeners.values()) {
listener.destroy();
}
appSubscriptionLocks.clear();
serviceListeners.clear();
mappingListeners.clear();
}
@Override
public boolean isServiceDiscovery() {
return true;
}
protected void subscribeURLs(URL url, NotifyListener listener, Set<String> serviceNames) {
serviceNames = toTreeSet(serviceNames);
String serviceNamesKey = toStringKeys(serviceNames);
String serviceKey = url.getServiceKey();
logger.info(
String.format("Trying to subscribe from apps %s for service key %s, ", serviceNamesKey, serviceKey));
// register ServiceInstancesChangedListener
Lock appSubscriptionLock = getAppSubscription(serviceNamesKey);
try {
appSubscriptionLock.lock();
ServiceInstancesChangedListener serviceInstancesChangedListener = serviceListeners.get(serviceNamesKey);
if (serviceInstancesChangedListener == null) {
serviceInstancesChangedListener = serviceDiscovery.createListener(serviceNames);
for (String serviceName : serviceNames) {
List<ServiceInstance> serviceInstances = serviceDiscovery.getInstances(serviceName);
if (CollectionUtils.isNotEmpty(serviceInstances)) {
serviceInstancesChangedListener.onEvent(
new ServiceInstancesChangedEvent(serviceName, serviceInstances));
}
}
serviceListeners.put(serviceNamesKey, serviceInstancesChangedListener);
}
if (!serviceInstancesChangedListener.isDestroyed()) {
listener.addServiceListener(serviceInstancesChangedListener);
serviceInstancesChangedListener.addListenerAndNotify(url, listener);
ServiceInstancesChangedListener finalServiceInstancesChangedListener = serviceInstancesChangedListener;
String serviceDiscoveryName =
url.getParameter(RegistryConstants.REGISTRY_CLUSTER_KEY, url.getProtocol());
MetricsEventBus.post(
RegistryEvent.toSsEvent(
url.getApplicationModel(), serviceKey, Collections.singletonList(serviceDiscoveryName)),
() -> {
serviceDiscovery.addServiceInstancesChangedListener(finalServiceInstancesChangedListener);
return null;
});
} else {
logger.info(String.format("Listener of %s has been destroyed by another thread.", serviceNamesKey));
serviceListeners.remove(serviceNamesKey);
}
} finally {
appSubscriptionLock.unlock();
}
}
/**
* Supports or not ?
*
* @param registryURL the {@link URL url} of registry
* @return if supported, return <code>true</code>, or <code>false</code>
*/
public static boolean supports(URL registryURL) {
return SERVICE_REGISTRY_TYPE.equalsIgnoreCase(registryURL.getParameter(REGISTRY_TYPE_KEY));
}
public Map<String, ServiceInstancesChangedListener> getServiceListeners() {
return serviceListeners;
}
private
|
belongs
|
java
|
spring-projects__spring-boot
|
configuration-metadata/spring-boot-configuration-processor/src/test/java/org/springframework/boot/configurationprocessor/test/TestableAnnotationProcessor.java
|
{
"start": 1421,
"end": 2249
}
|
class ____<T> extends AbstractProcessor {
private final BiConsumer<RoundEnvironmentTester, T> consumer;
private final Function<ProcessingEnvironment, T> factory;
private T target;
public TestableAnnotationProcessor(BiConsumer<RoundEnvironmentTester, T> consumer,
Function<ProcessingEnvironment, T> factory) {
this.consumer = consumer;
this.factory = factory;
}
@Override
public synchronized void init(ProcessingEnvironment env) {
this.target = this.factory.apply(env);
}
@Override
public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
RoundEnvironmentTester tester = new RoundEnvironmentTester(roundEnv);
if (!roundEnv.getRootElements().isEmpty()) {
this.consumer.accept(tester, this.target);
return true;
}
return false;
}
}
|
TestableAnnotationProcessor
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/query/OrderByThreeEntityTest.java
|
{
"start": 1794,
"end": 2238
}
|
class ____ {
@Id
@GeneratedValue
private Integer id;
@ElementCollection
@OrderBy("value desc")
private Map<Key, Item> data = new HashMap<>();
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public Map<Key, Item> getData() {
return data;
}
public void setData(Map<Key, Item> data) {
this.data = data;
}
}
@Entity(name = "MapKey")
@Audited
public static
|
Container
|
java
|
quarkusio__quarkus
|
test-framework/kubernetes-client/src/main/java/io/quarkus/test/kubernetes/client/KubernetesServerTestResource.java
|
{
"start": 383,
"end": 2088
}
|
class ____ extends AbstractKubernetesTestResource<KubernetesServer, NamespacedKubernetesClient>
implements QuarkusTestResourceConfigurableLifecycleManager<WithKubernetesTestServer> {
private boolean https = false;
private boolean crud = true;
private int port = 0;
private Consumer<KubernetesServer> setup;
@Override
public void init(WithKubernetesTestServer annotation) {
this.https = annotation.https();
this.crud = annotation.crud();
this.port = annotation.port();
try {
this.setup = annotation.setup().getDeclaredConstructor().newInstance();
} catch (InstantiationException | IllegalAccessException | NoSuchMethodException | InvocationTargetException e) {
throw new RuntimeException(e);
}
}
@Override
protected NamespacedKubernetesClient getClient() {
return server.getClient();
}
@Override
protected void initServer() {
server.before();
}
@Override
protected void configureServer() {
if (setup != null)
setup.accept(server);
}
@Override
protected KubernetesServer createServer() {
return new KubernetesServer(https, crud, InetAddress.getLoopbackAddress(), port, Collections.emptyList());
}
@Override
public void stop() {
if (server != null) {
server.after();
server = null;
}
}
@Override
protected Class<?> getInjectedClass() {
return KubernetesServer.class;
}
@Override
protected Class<? extends Annotation> getInjectionAnnotation() {
return KubernetesTestServer.class;
}
}
|
KubernetesServerTestResource
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/processor/internals/assignment/AssignorConfiguration.java
|
{
"start": 7140,
"end": 7924
}
|
class ____ for " + StreamsConfig.TASK_ASSIGNOR_CLASS_CONFIG + " but got " + userTaskAssignorClassname,
e
);
}
}
public AssignmentListener assignmentListener() {
final Object o = internalConfigs.get(InternalConfig.ASSIGNMENT_LISTENER);
if (o == null) {
return stable -> { };
}
if (!(o instanceof AssignmentListener)) {
final KafkaException fatalException = new KafkaException(
String.format("%s is not an instance of %s", o.getClass().getName(), AssignmentListener.class.getName())
);
log.error(fatalException.getMessage(), fatalException);
throw fatalException;
}
return (AssignmentListener) o;
}
public
|
name
|
java
|
spring-projects__spring-framework
|
spring-context-support/src/main/java/org/springframework/cache/jcache/interceptor/JCacheAspectSupport.java
|
{
"start": 6564,
"end": 6915
}
|
class ____ implements CacheOperationInvoker {
private final CacheOperationInvoker delegate;
public CacheOperationInvokerAdapter(CacheOperationInvoker delegate) {
this.delegate = delegate;
}
@Override
public @Nullable Object invoke() throws ThrowableWrapper {
return invokeOperation(this.delegate);
}
}
}
|
CacheOperationInvokerAdapter
|
java
|
apache__spark
|
launcher/src/main/java/org/apache/spark/launcher/package-info.java
|
{
"start": 2843,
"end": 3583
}
|
class ____ {
* public static void main(String[] args) throws Exception {
* Process spark = new SparkLauncher()
* .setAppResource("/my/app.jar")
* .setMainClass("my.spark.app.Main")
* .setMaster("local")
* .setConf(SparkLauncher.DRIVER_MEMORY, "2g")
* .launch();
* spark.waitFor();
* }
* }
* }
* </pre>
*
* <p>This method requires the calling code to manually manage the child process, including its
* output streams (to avoid possible deadlocks). It's recommended that
* {@link org.apache.spark.launcher.SparkLauncher#startApplication(
* org.apache.spark.launcher.SparkAppHandle.Listener...)} be used instead.</p>
*/
package org.apache.spark.launcher;
|
MyLauncher
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/Count1AggFunction.java
|
{
"start": 1665,
"end": 2935
}
|
class ____ extends DeclarativeAggregateFunction {
private final UnresolvedReferenceExpression count1 = unresolvedRef("count1");
@Override
public int operandCount() {
return 1;
}
@Override
public UnresolvedReferenceExpression[] aggBufferAttributes() {
return new UnresolvedReferenceExpression[] {count1};
}
@Override
public DataType[] getAggBufferTypes() {
return new DataType[] {DataTypes.BIGINT()};
}
@Override
public DataType getResultType() {
return DataTypes.BIGINT();
}
@Override
public Expression[] initialValuesExpressions() {
return new Expression[] {/* count1= */ literal(0L, getResultType().notNull())};
}
@Override
public Expression[] accumulateExpressions() {
return new Expression[] {/* count1= */ plus(count1, literal(1L))};
}
@Override
public Expression[] retractExpressions() {
return new Expression[] {/* count1= */ minus(count1, literal(1L))};
}
@Override
public Expression[] mergeExpressions() {
return new Expression[] {/* count1= */ plus(count1, mergeOperand(count1))};
}
@Override
public Expression getValueExpression() {
return count1;
}
}
|
Count1AggFunction
|
java
|
google__guava
|
android/guava/src/com/google/common/graph/ValueGraphBuilder.java
|
{
"start": 3038,
"end": 8116
}
|
class ____<N, V> extends AbstractGraphBuilder<N> {
/** Creates a new instance with the specified edge directionality. */
private ValueGraphBuilder(boolean directed) {
super(directed);
}
/** Returns a {@link ValueGraphBuilder} for building directed graphs. */
public static ValueGraphBuilder<Object, Object> directed() {
return new ValueGraphBuilder<>(true);
}
/** Returns a {@link ValueGraphBuilder} for building undirected graphs. */
public static ValueGraphBuilder<Object, Object> undirected() {
return new ValueGraphBuilder<>(false);
}
/**
* Returns a {@link ValueGraphBuilder} initialized with all properties queryable from {@code
* graph}.
*
* <p>The "queryable" properties are those that are exposed through the {@link ValueGraph}
* interface, such as {@link ValueGraph#isDirected()}. Other properties, such as {@link
* #expectedNodeCount(int)}, are not set in the new builder.
*/
public static <N, V> ValueGraphBuilder<N, V> from(ValueGraph<N, V> graph) {
return new ValueGraphBuilder<N, V>(graph.isDirected())
.allowsSelfLoops(graph.allowsSelfLoops())
.nodeOrder(graph.nodeOrder())
.incidentEdgeOrder(graph.incidentEdgeOrder());
}
/**
* Returns an {@link ImmutableValueGraph.Builder} with the properties of this {@link
* ValueGraphBuilder}.
*
* <p>The returned builder can be used for populating an {@link ImmutableValueGraph}.
*
* <p>Note that the returned builder will always have {@link #incidentEdgeOrder} set to {@link
* ElementOrder#stable()}, regardless of the value that was set in this builder.
*
* @since 28.0
*/
public <N1 extends N, V1 extends V> ImmutableValueGraph.Builder<N1, V1> immutable() {
ValueGraphBuilder<N1, V1> castBuilder = cast();
return new ImmutableValueGraph.Builder<>(castBuilder);
}
/**
* Specifies whether the graph will allow self-loops (edges that connect a node to itself).
* Attempting to add a self-loop to a graph that does not allow them will throw an {@link
* UnsupportedOperationException}.
*
* <p>The default value is {@code false}.
*/
@CanIgnoreReturnValue
public ValueGraphBuilder<N, V> allowsSelfLoops(boolean allowsSelfLoops) {
this.allowsSelfLoops = allowsSelfLoops;
return this;
}
/**
* Specifies the expected number of nodes in the graph.
*
* @throws IllegalArgumentException if {@code expectedNodeCount} is negative
*/
@CanIgnoreReturnValue
public ValueGraphBuilder<N, V> expectedNodeCount(int expectedNodeCount) {
this.expectedNodeCount = Optional.of(checkNonNegative(expectedNodeCount));
return this;
}
/**
* Specifies the order of iteration for the elements of {@link Graph#nodes()}.
*
* <p>The default value is {@link ElementOrder#insertion() insertion order}.
*/
public <N1 extends N> ValueGraphBuilder<N1, V> nodeOrder(ElementOrder<N1> nodeOrder) {
ValueGraphBuilder<N1, V> newBuilder = cast();
newBuilder.nodeOrder = checkNotNull(nodeOrder);
return newBuilder;
}
/**
* Specifies the order of iteration for the elements of {@link ValueGraph#edges()}, {@link
* ValueGraph#adjacentNodes(Object)}, {@link ValueGraph#predecessors(Object)}, {@link
* ValueGraph#successors(Object)} and {@link ValueGraph#incidentEdges(Object)}.
*
* <p>The default value is {@link ElementOrder#unordered() unordered} for mutable graphs. For
* immutable graphs, this value is ignored; they always have a {@link ElementOrder#stable()
* stable} order.
*
* @throws IllegalArgumentException if {@code incidentEdgeOrder} is not either {@code
* ElementOrder.unordered()} or {@code ElementOrder.stable()}.
* @since 29.0
*/
public <N1 extends N> ValueGraphBuilder<N1, V> incidentEdgeOrder(
ElementOrder<N1> incidentEdgeOrder) {
checkArgument(
incidentEdgeOrder.type() == ElementOrder.Type.UNORDERED
|| incidentEdgeOrder.type() == ElementOrder.Type.STABLE,
"The given elementOrder (%s) is unsupported. incidentEdgeOrder() only supports"
+ " ElementOrder.unordered() and ElementOrder.stable().",
incidentEdgeOrder);
ValueGraphBuilder<N1, V> newBuilder = cast();
newBuilder.incidentEdgeOrder = checkNotNull(incidentEdgeOrder);
return newBuilder;
}
/**
* Returns an empty {@link MutableValueGraph} with the properties of this {@link
* ValueGraphBuilder}.
*/
public <N1 extends N, V1 extends V> MutableValueGraph<N1, V1> build() {
return new StandardMutableValueGraph<>(this);
}
ValueGraphBuilder<N, V> copy() {
ValueGraphBuilder<N, V> newBuilder = new ValueGraphBuilder<>(directed);
newBuilder.allowsSelfLoops = allowsSelfLoops;
newBuilder.nodeOrder = nodeOrder;
newBuilder.expectedNodeCount = expectedNodeCount;
newBuilder.incidentEdgeOrder = incidentEdgeOrder;
return newBuilder;
}
@SuppressWarnings("unchecked")
private <N1 extends N, V1 extends V> ValueGraphBuilder<N1, V1> cast() {
return (ValueGraphBuilder<N1, V1>) this;
}
}
|
ValueGraphBuilder
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/operators/testutils/TestData.java
|
{
"start": 12688,
"end": 15159
}
|
class ____<T extends Tuple2> implements MutableObjectIterator<T> {
private final Tuple2 SENTINEL = new Tuple2();
private final BlockingQueue<Tuple2> queue;
public MockTuple2Reader() {
this.queue = new ArrayBlockingQueue<Tuple2>(32, false);
}
public MockTuple2Reader(int size) {
this.queue = new ArrayBlockingQueue<Tuple2>(size, false);
}
@Override
public T next(T reuse) {
Tuple2 r = null;
while (r == null) {
try {
r = queue.take();
} catch (InterruptedException iex) {
throw new RuntimeException("Reader was interrupted.");
}
}
if (r.equals(SENTINEL)) {
// put the sentinel back, to ensure that repeated calls do not block
try {
queue.put(r);
} catch (InterruptedException e) {
throw new RuntimeException("Reader was interrupted.");
}
return null;
} else {
reuse.setField(r.getField(0), 0);
reuse.setField(r.getField(1), 1);
return reuse;
}
}
@Override
public T next() {
Tuple2 r = null;
while (r == null) {
try {
r = queue.take();
} catch (InterruptedException iex) {
throw new RuntimeException("Reader was interrupted.");
}
}
if (r.equals(SENTINEL)) {
// put the sentinel back, to ensure that repeated calls do not block
try {
queue.put(r);
} catch (InterruptedException e) {
throw new RuntimeException("Reader was interrupted.");
}
return null;
} else {
Tuple2 result = new Tuple2(r.f0, r.f1);
return (T) result;
}
}
public void emit(Tuple2 element) throws InterruptedException {
queue.put(new Tuple2(element.f0, element.f1));
}
public void close() {
try {
queue.put(SENTINEL);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
}
public static
|
MockTuple2Reader
|
java
|
qos-ch__slf4j
|
slf4j-api/src/test/java/org/slf4j/helpers/StringPrintStream.java
|
{
"start": 1501,
"end": 2374
}
|
class ____ extends PrintStream {
public static final String LINE_SEP = System.getProperty("line.separator");
PrintStream other;
boolean duplicate = false;
public List<String> stringList = Collections.synchronizedList(new ArrayList<>());
public StringPrintStream(PrintStream ps, boolean duplicate) {
super(ps);
other = ps;
this.duplicate = duplicate;
}
public StringPrintStream(PrintStream ps) {
this(ps, false);
}
public void print(String s) {
if (duplicate)
other.print(s);
stringList.add(s);
}
public void println(String s) {
if (duplicate)
other.println(s);
stringList.add(s);
}
public void println(Object o) {
if (duplicate)
other.println(o);
stringList.add(o.toString());
}
}
|
StringPrintStream
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/version/JpaSpecVersionValueUpdatingTest.java
|
{
"start": 644,
"end": 3040
}
|
class ____ {
@AfterEach
public void afterEach(SessionFactoryScope scope) { scope.dropData(); }
@Test
public void testVersionNotIncrementedOnModificationOfNonOwningCollectionNonCascaded(SessionFactoryScope scope) {
final Customer initialCustomer = new Customer();
initialCustomer.id = 1L;
scope.inTransaction( session -> session.persist( initialCustomer ) );
long initialVersion = initialCustomer.version;
Customer customer = scope.fromTransaction( session -> {
Customer c = session.find( Customer.class, 1L );
assertEquals( initialVersion, c.version );
Order order = new Order();
order.id = 1L;
order.customer = c;
c.orders.add( order );
session.persist( order );
return c;
} );
assertEquals( initialVersion, customer.version );
customer = scope.fromTransaction( session -> {
Customer c = session.find( Customer.class, 1L );
assertEquals( initialVersion, c.version );
Order order2 = new Order();
order2.id = 2L;
order2.customer = c;
c.orders.add( order2 );
session.persist( order2 );
return c;
} );
assertEquals( initialVersion, customer.version );
scope.inTransaction( session -> {
Customer c = session.getReference( Customer.class, 1L );
assertEquals( initialVersion, c.version );
} );
}
@Test
public void testVersionNotIncrementedOnModificationOfNonOwningCollectionCascaded(SessionFactoryScope scope) {
Customer initialCustomer = new Customer();
initialCustomer.id = 1L;
scope.inTransaction( session -> session.persist( initialCustomer ) );
long initialVersion = initialCustomer.version;
Customer customer = scope.fromTransaction( session -> {
Customer c = session.find( Customer.class, 1L );
assertEquals( initialVersion, c.version );
Order order = new Order();
order.id = 1L;
order.customer = c;
c.orders.add( order );
return c;
}
);
assertEquals( initialVersion, customer.version );
customer = scope.fromTransaction( session -> {
Customer c = session.find( Customer.class, 1L );
Order order2 = new Order();
order2.id = 2L;
order2.customer = c;
c.orders.add( order2 );
return c;
}
);
assertEquals( initialVersion, customer.version );
scope.inTransaction( session -> {
Customer c = session.getReference( Customer.class, 1L );
assertEquals( initialVersion, c.version );
} );
}
}
|
JpaSpecVersionValueUpdatingTest
|
java
|
alibaba__nacos
|
common/src/main/java/com/alibaba/nacos/common/task/engine/TaskExecuteWorker.java
|
{
"start": 3098,
"end": 3962
}
|
class ____ extends Thread {
InnerWorker(String name) {
setDaemon(false);
setName(name);
}
@Override
public void run() {
while (!closed.get()) {
try {
Runnable task = queue.take();
long begin = System.currentTimeMillis();
task.run();
long duration = System.currentTimeMillis() - begin;
if (duration > 1000L) {
log.warn("task {} takes {}ms", task, duration);
}
} catch (InterruptedException e) {
// [issue #13752] ignore stack log
} catch (Throwable e) {
log.error("[TASK-FAILED] " + e, e);
}
}
}
}
}
|
InnerWorker
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/jmx/support/JmxUtilsTests.java
|
{
"start": 5054,
"end": 5236
}
|
class ____ {
private String name;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
public static
|
AttributeTestBean
|
java
|
hibernate__hibernate-orm
|
hibernate-spatial/src/test/java/org/hibernate/spatial/integration/functions/CommonFunctionTests.java
|
{
"start": 1535,
"end": 3249
}
|
class ____ extends SpatialTestBase {
public final static TestSupport.TestDataPurpose PURPOSE = TestSupport.TestDataPurpose.SpatialFunctionsData;
List received = new ArrayList();
List expected = new ArrayList();
@Override
public TestSupport.TestDataPurpose purpose() {
return PURPOSE;
}
@TestFactory
public Stream<DynamicTest> testFunction() {
return
TestTemplates.all( templates, hqlOverrides, geometryEquality, filterGeometry )
.filter( f -> isSupported( f.function ) )
.filter( f -> !exludeFromTest.contains( f.function ) )
.flatMap( t -> Stream.of(
t.build( Model.JTSMODEL, codec ),
t.build( Model.GLMODEL, codec )
) )
.flatMap( this::buildTests );
}
protected Stream<DynamicTest> buildTests(FunctionTestTemplate template) {
return Stream.of(
template.getFunctionName(),
template.getAltFunctionName()
)
.filter( Objects::nonNull )
.map( fn -> DynamicTest.dynamicTest(
displayName( template, fn ), executableTest( template, fn )
) );
}
protected String displayName(FunctionTestTemplate template, String fnName) {
return String.format(
Locale.ROOT,
"Test for function %s on entity %s",
fnName,
template.getModel().entityClass.getSimpleName()
);
}
protected Executable executableTest(FunctionTestTemplate template, String fnName) {
return () -> {
expected.clear();
received.clear();
expected = template.executeNativeQuery( scope );
received = template.executeHQL( scope, fnName );
if ( !expected.equals( received ) ) {
for ( int i = 0; i < expected.size(); i++ ) {
assertEquals( expected.get( i ), received.get( i ) );
}
}
};
}
}
|
CommonFunctionTests
|
java
|
apache__kafka
|
streams/integration-tests/src/test/java/org/apache/kafka/streams/integration/SuppressionDurabilityIntegrationTest.java
|
{
"start": 3877,
"end": 10706
}
|
class ____ {
private static final long NOW = Instant.now().toEpochMilli();
public static final EmbeddedKafkaCluster CLUSTER = new EmbeddedKafkaCluster(3);
@BeforeAll
public static void startCluster() throws IOException {
CLUSTER.start();
}
@AfterAll
public static void closeCluster() {
CLUSTER.stop();
}
public TestInfo testInfo;
private static final StringDeserializer STRING_DESERIALIZER = new StringDeserializer();
private static final StringSerializer STRING_SERIALIZER = new StringSerializer();
private static final Serde<String> STRING_SERDE = Serdes.String();
private static final LongDeserializer LONG_DESERIALIZER = new LongDeserializer();
private static final long COMMIT_INTERVAL = 100L;
@Test
public void shouldRecoverBufferAfterShutdown(final TestInfo testInfo) {
final String testId = safeUniqueTestName(testInfo);
final String appId = "appId_" + testId;
final String input = "input" + testId;
final String storeName = "counts";
final String outputSuppressed = "output-suppressed" + testId;
final String outputRaw = "output-raw" + testId;
// create multiple partitions as a trap, in case the buffer doesn't properly set the
// partition on the records, but instead relies on the default key partitioner
cleanStateBeforeTest(CLUSTER, 2, input, outputRaw, outputSuppressed);
final StreamsBuilder builder = new StreamsBuilder();
final KTable<String, Long> valueCounts = builder
.stream(
input,
Consumed.with(STRING_SERDE, STRING_SERDE))
.groupByKey()
.count(Materialized.<String, Long, KeyValueStore<Bytes, byte[]>>as(storeName).withCachingDisabled());
final KStream<String, Long> suppressedCounts = valueCounts
.suppress(untilTimeLimit(ofMillis(MAX_VALUE), maxRecords(3L).emitEarlyWhenFull()))
.toStream();
final AtomicInteger eventCount = new AtomicInteger(0);
suppressedCounts.foreach((key, value) -> eventCount.incrementAndGet());
// expect all post-suppress records to keep the right input topic
final MetadataValidator metadataValidator = new MetadataValidator(input);
suppressedCounts
.process(metadataValidator)
.to(outputSuppressed, Produced.with(STRING_SERDE, Serdes.Long()));
valueCounts
.toStream()
.process(metadataValidator)
.to(outputRaw, Produced.with(STRING_SERDE, Serdes.Long()));
final Properties streamsConfig = mkProperties(mkMap(
mkEntry(StreamsConfig.APPLICATION_ID_CONFIG, appId),
mkEntry(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers()),
mkEntry(StreamsConfig.POLL_MS_CONFIG, Long.toString(COMMIT_INTERVAL)),
mkEntry(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath())
));
streamsConfig.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, COMMIT_INTERVAL);
KafkaStreams driver = getStartedStreams(streamsConfig, builder, true);
try {
// start by putting some stuff in the buffer
// note, we send all input records to partition 0
// to make sure that supppress doesn't erroneously send records to other partitions.
produceSynchronouslyToPartitionZero(
input,
asList(
new KeyValueTimestamp<>("k1", "v1", scaledTime(1L)),
new KeyValueTimestamp<>("k2", "v2", scaledTime(2L)),
new KeyValueTimestamp<>("k3", "v3", scaledTime(3L))
)
);
verifyOutput(
outputRaw,
asList(
new KeyValueTimestamp<>("k1", 1L, scaledTime(1L)),
new KeyValueTimestamp<>("k2", 1L, scaledTime(2L)),
new KeyValueTimestamp<>("k3", 1L, scaledTime(3L))
)
);
assertThat(eventCount.get(), is(0));
// flush two of the first three events out.
produceSynchronouslyToPartitionZero(
input,
asList(
new KeyValueTimestamp<>("k4", "v4", scaledTime(4L)),
new KeyValueTimestamp<>("k5", "v5", scaledTime(5L))
)
);
verifyOutput(
outputRaw,
asList(
new KeyValueTimestamp<>("k4", 1L, scaledTime(4L)),
new KeyValueTimestamp<>("k5", 1L, scaledTime(5L))
)
);
assertThat(eventCount.get(), is(2));
verifyOutput(
outputSuppressed,
asList(
new KeyValueTimestamp<>("k1", 1L, scaledTime(1L)),
new KeyValueTimestamp<>("k2", 1L, scaledTime(2L))
)
);
// bounce to ensure that the history, including retractions,
// get restored properly. (i.e., we shouldn't see those first events again)
// restart the driver
driver.close();
assertThat(driver.state(), is(KafkaStreams.State.NOT_RUNNING));
driver = getStartedStreams(streamsConfig, builder, false);
// flush those recovered buffered events out.
produceSynchronouslyToPartitionZero(
input,
asList(
new KeyValueTimestamp<>("k6", "v6", scaledTime(6L)),
new KeyValueTimestamp<>("k7", "v7", scaledTime(7L)),
new KeyValueTimestamp<>("k8", "v8", scaledTime(8L))
)
);
verifyOutput(
outputRaw,
asList(
new KeyValueTimestamp<>("k6", 1L, scaledTime(6L)),
new KeyValueTimestamp<>("k7", 1L, scaledTime(7L)),
new KeyValueTimestamp<>("k8", 1L, scaledTime(8L))
)
);
assertThat("suppress has apparently produced some duplicates. There should only be 5 output events.",
eventCount.get(), is(5));
verifyOutput(
outputSuppressed,
asList(
new KeyValueTimestamp<>("k3", 1L, scaledTime(3L)),
new KeyValueTimestamp<>("k4", 1L, scaledTime(4L)),
new KeyValueTimestamp<>("k5", 1L, scaledTime(5L))
)
);
metadataValidator.raiseExceptionIfAny();
} finally {
driver.close();
quietlyCleanStateAfterTest(CLUSTER, driver);
}
}
private static final
|
SuppressionDurabilityIntegrationTest
|
java
|
google__auto
|
value/src/main/java/com/google/auto/value/processor/TypeEncoder.java
|
{
"start": 15295,
"end": 16633
}
|
class ____ an
// inner (not static) class.
visit2(enclosing, sb);
sb.append(".").append(type.asElement().getSimpleName());
} else {
sb.append('`').append(className(type)).append('`');
}
}
void appendTypeArguments(DeclaredType type, StringBuilder sb) {
List<? extends TypeMirror> arguments = type.getTypeArguments();
if (!arguments.isEmpty()) {
sb.append("<");
String sep = "";
for (TypeMirror argument : arguments) {
sb.append(sep);
sep = ", ";
visit2(argument, sb);
}
sb.append(">");
}
}
@Override
public StringBuilder visitWildcard(WildcardType type, StringBuilder sb) {
sb.append("?");
TypeMirror extendsBound = type.getExtendsBound();
TypeMirror superBound = type.getSuperBound();
if (superBound != null) {
sb.append(" super ");
visit2(superBound, sb);
} else if (extendsBound != null) {
sb.append(" extends ");
visit2(extendsBound, sb);
}
return sb;
}
@Override
public StringBuilder visitError(ErrorType t, StringBuilder p) {
throw new MissingTypeException(t);
}
}
/** Like {@link EncodingTypeVisitor} except that type parameters are omitted from the result. */
private static
|
is
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/streaming/runtime/watermarkstatus/HeapPriorityQueue.java
|
{
"start": 2118,
"end": 8478
}
|
class ____<T extends HeapPriorityQueue.HeapPriorityQueueElement> {
/** The index of the head element in the array that represents the heap. */
private static final int QUEUE_HEAD_INDEX = 1;
/** Comparator for the priority of contained elements. */
@Nonnull private final PriorityComparator<T> elementPriorityComparator;
/** The array that represents the heap-organized priority queue. */
@Nonnull private T[] queue;
/** The current size of the priority queue. */
@Nonnegative private int size;
/**
* Creates an empty {@link HeapPriorityQueue} with the requested initial capacity.
*
* @param elementPriorityComparator comparator for the priority of contained elements.
* @param minimumCapacity the minimum and initial capacity of this priority queue.
*/
@SuppressWarnings("unchecked")
public HeapPriorityQueue(
@Nonnull PriorityComparator<T> elementPriorityComparator,
@Nonnegative int minimumCapacity) {
this.queue = (T[]) new HeapPriorityQueueElement[getHeadElementIndex() + minimumCapacity];
this.size = 0;
this.elementPriorityComparator = elementPriorityComparator;
}
public void adjustModifiedElement(@Nonnull T element) {
final int elementIndex = element.getInternalIndex();
if (element == queue[elementIndex]) {
adjustElementAtIndex(element, elementIndex);
}
}
@Nullable
public T poll() {
return size() > 0 ? removeInternal(getHeadElementIndex()) : null;
}
@Nullable
public T peek() {
// References to removed elements are expected to become set to null.
return queue[getHeadElementIndex()];
}
public boolean add(@Nonnull T toAdd) {
addInternal(toAdd);
return toAdd.getInternalIndex() == getHeadElementIndex();
}
public boolean remove(@Nonnull T toRemove) {
final int elementIndex = toRemove.getInternalIndex();
removeInternal(elementIndex);
return elementIndex == getHeadElementIndex();
}
public boolean isEmpty() {
return size() == 0;
}
public int size() {
return size;
}
/** Clears the queue. */
public void clear() {
final int arrayOffset = getHeadElementIndex();
Arrays.fill(queue, arrayOffset, arrayOffset + size, null);
size = 0;
}
private void resizeQueueArray(int desiredSize, int minRequiredSize) {
if (isValidArraySize(desiredSize)) {
queue = Arrays.copyOf(queue, desiredSize);
} else if (isValidArraySize(minRequiredSize)) {
queue = Arrays.copyOf(queue, MAX_ARRAY_SIZE);
} else {
throw new OutOfMemoryError(
"Required minimum heap size "
+ minRequiredSize
+ " exceeds maximum size of "
+ MAX_ARRAY_SIZE
+ ".");
}
}
private void moveElementToIdx(T element, int idx) {
queue[idx] = element;
element.setInternalIndex(idx);
}
private static boolean isValidArraySize(int size) {
return size >= 0 && size <= MAX_ARRAY_SIZE;
}
private int getHeadElementIndex() {
return QUEUE_HEAD_INDEX;
}
private void addInternal(@Nonnull T element) {
final int newSize = increaseSizeByOne();
moveElementToIdx(element, newSize);
siftUp(newSize);
}
private T removeInternal(int removeIdx) {
T[] heap = this.queue;
T removedValue = heap[removeIdx];
assert removedValue.getInternalIndex() == removeIdx;
final int oldSize = size;
if (removeIdx != oldSize) {
T element = heap[oldSize];
moveElementToIdx(element, removeIdx);
adjustElementAtIndex(element, removeIdx);
}
heap[oldSize] = null;
--size;
return removedValue;
}
private void adjustElementAtIndex(T element, int index) {
siftDown(index);
if (queue[index] == element) {
siftUp(index);
}
}
private void siftUp(int idx) {
final T[] heap = this.queue;
final T currentElement = heap[idx];
int parentIdx = idx >>> 1;
while (parentIdx > 0 && isElementPriorityLessThen(currentElement, heap[parentIdx])) {
moveElementToIdx(heap[parentIdx], idx);
idx = parentIdx;
parentIdx >>>= 1;
}
moveElementToIdx(currentElement, idx);
}
private void siftDown(int idx) {
final T[] heap = this.queue;
final int heapSize = this.size;
final T currentElement = heap[idx];
int firstChildIdx = idx << 1;
int secondChildIdx = firstChildIdx + 1;
if (isElementIndexValid(secondChildIdx, heapSize)
&& isElementPriorityLessThen(heap[secondChildIdx], heap[firstChildIdx])) {
firstChildIdx = secondChildIdx;
}
while (isElementIndexValid(firstChildIdx, heapSize)
&& isElementPriorityLessThen(heap[firstChildIdx], currentElement)) {
moveElementToIdx(heap[firstChildIdx], idx);
idx = firstChildIdx;
firstChildIdx = idx << 1;
secondChildIdx = firstChildIdx + 1;
if (isElementIndexValid(secondChildIdx, heapSize)
&& isElementPriorityLessThen(heap[secondChildIdx], heap[firstChildIdx])) {
firstChildIdx = secondChildIdx;
}
}
moveElementToIdx(currentElement, idx);
}
private boolean isElementIndexValid(int elementIndex, int heapSize) {
return elementIndex <= heapSize;
}
private boolean isElementPriorityLessThen(T a, T b) {
return elementPriorityComparator.comparePriority(a, b) < 0;
}
private int increaseSizeByOne() {
final int oldArraySize = queue.length;
final int minRequiredNewSize = ++size;
if (minRequiredNewSize >= oldArraySize) {
final int grow = (oldArraySize < 64) ? oldArraySize + 2 : oldArraySize >> 1;
resizeQueueArray(oldArraySize + grow, minRequiredNewSize);
}
// TODO implement shrinking as well?
return minRequiredNewSize;
}
/**
* This
|
HeapPriorityQueue
|
java
|
spring-projects__spring-security
|
core/src/test/java/org/springframework/security/provisioning/InMemoryUserDetailsManagerTests.java
|
{
"start": 9648,
"end": 10357
}
|
class ____ implements MutableUserDetails, CredentialsContainer {
private final UserDetails delegate;
private String password;
CustomUser(UserDetails user) {
this.delegate = user;
this.password = user.getPassword();
}
@Override
public Collection<? extends GrantedAuthority> getAuthorities() {
return this.delegate.getAuthorities();
}
@Override
public String getPassword() {
return this.password;
}
@Override
public void setPassword(final String password) {
this.password = password;
}
@Override
public String getUsername() {
return this.delegate.getUsername();
}
@Override
public void eraseCredentials() {
this.password = null;
}
}
}
|
CustomUser
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesAction.java
|
{
"start": 464,
"end": 767
}
|
class ____ extends ActionType<PutPrivilegesResponse> {
public static final PutPrivilegesAction INSTANCE = new PutPrivilegesAction();
public static final String NAME = "cluster:admin/xpack/security/privilege/put";
private PutPrivilegesAction() {
super(NAME);
}
}
|
PutPrivilegesAction
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/test/java/org/apache/flink/table/runtime/operators/over/NonTimeOverWindowTestBase.java
|
{
"start": 5434,
"end": 6249
}
|
class ____ implements RecordEqualiser {
private static final long serialVersionUID = -6706336100425614942L;
@Override
public boolean equals(RowData row1, RowData row2) {
if (row1 instanceof BinaryRowData && row2 instanceof BinaryRowData) {
return row1.equals(row2);
} else if (row1 instanceof GenericRowData && row2 instanceof GenericRowData) {
return row1.getString(0).equals(row2.getString(0))
&& row1.getLong(1) == row2.getLong(1)
&& row1.getLong(2) == row2.getLong(2);
} else {
throw new UnsupportedOperationException();
}
}
}
/** Custom test sortKey equaliser for comparing sort keys. */
public static
|
TestRowValueEqualiser
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/NodeShutdownAllocationDeciderTests.java
|
{
"start": 1927,
"end": 12493
}
|
class ____ extends ESAllocationTestCase {
private static final DiscoveryNode DATA_NODE = newNode("node-data", Set.of(DiscoveryNodeRole.DATA_ROLE));
private final ShardRouting shard = ShardRouting.newUnassigned(
new ShardId("myindex", "myindex", 0),
true,
RecoverySource.EmptyStoreRecoverySource.INSTANCE,
new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "index created"),
ShardRouting.Role.DEFAULT
);
private final ClusterSettings clusterSettings = createBuiltInClusterSettings();
private final NodeShutdownAllocationDecider decider = new NodeShutdownAllocationDecider();
private final AllocationDeciders allocationDeciders = new AllocationDeciders(
Arrays.asList(decider, new SameShardAllocationDecider(clusterSettings), new ReplicaAfterPrimaryActiveAllocationDecider())
);
private final String idxName = "test-idx";
private final String idxUuid = "test-idx-uuid";
private final IndexMetadata indexMetadata = IndexMetadata.builder(idxName)
.settings(indexSettings(IndexVersion.current(), 1, 0).put(IndexMetadata.SETTING_INDEX_UUID, idxUuid))
.build();
private static final List<SingleNodeShutdownMetadata.Type> REMOVE_SHUTDOWN_TYPES = List.of(
SingleNodeShutdownMetadata.Type.REPLACE,
SingleNodeShutdownMetadata.Type.REMOVE,
SingleNodeShutdownMetadata.Type.SIGTERM
);
public void testCanAllocateShardsToRestartingNode() {
ClusterState state = prepareState(SingleNodeShutdownMetadata.Type.RESTART);
RoutingAllocation allocation = createRoutingAllocation(state);
RoutingNode routingNode = RoutingNodesHelper.routingNode(DATA_NODE.getId(), DATA_NODE, shard);
Decision decision = decider.canAllocate(shard, routingNode, allocation);
assertThat(decision.type(), equalTo(Decision.Type.YES));
assertThat(
decision.getExplanation(),
equalTo("node [" + DATA_NODE.getId() + "] is preparing to restart, but will remain in the cluster")
);
}
public void testCannotAllocateShardsToRemovingNode() {
for (SingleNodeShutdownMetadata.Type type : REMOVE_SHUTDOWN_TYPES) {
ClusterState state = prepareState(type);
RoutingAllocation allocation = createRoutingAllocation(state);
RoutingNode routingNode = RoutingNodesHelper.routingNode(DATA_NODE.getId(), DATA_NODE, shard);
Decision decision = decider.canAllocate(shard, routingNode, allocation);
assertThat(type.toString(), decision.type(), equalTo(Decision.Type.NO));
assertThat(decision.getExplanation(), equalTo("node [" + DATA_NODE.getId() + "] is preparing to be removed from the cluster"));
}
}
public void testShardsCanRemainOnRestartingNode() {
ClusterState state = prepareState(SingleNodeShutdownMetadata.Type.RESTART);
RoutingAllocation allocation = createRoutingAllocation(state);
RoutingNode routingNode = RoutingNodesHelper.routingNode(DATA_NODE.getId(), DATA_NODE, shard);
Decision decision = decider.canRemain(null, shard, routingNode, allocation);
assertThat(decision.type(), equalTo(Decision.Type.YES));
assertThat(
decision.getExplanation(),
equalTo("node [" + DATA_NODE.getId() + "] is preparing to restart, but will remain in the cluster")
);
}
public void testShardsCannotRemainOnRemovingNode() {
for (SingleNodeShutdownMetadata.Type type : REMOVE_SHUTDOWN_TYPES) {
ClusterState state = prepareState(type);
RoutingAllocation allocation = createRoutingAllocation(state);
RoutingNode routingNode = RoutingNodesHelper.routingNode(DATA_NODE.getId(), DATA_NODE, shard);
Decision decision = decider.canRemain(null, shard, routingNode, allocation);
assertThat(type.toString(), decision.type(), equalTo(Decision.Type.NO));
assertThat(
type.toString(),
decision.getExplanation(),
equalTo("node [" + DATA_NODE.getId() + "] is preparing to be removed from the cluster")
);
}
}
public void testCanAutoExpandToRestartingNode() {
ClusterState state = prepareState(SingleNodeShutdownMetadata.Type.RESTART);
RoutingAllocation allocation = createRoutingAllocation(state);
Decision decision = decider.shouldAutoExpandToNode(indexMetadata, DATA_NODE, allocation);
assertThat(decision.type(), equalTo(Decision.Type.YES));
assertThat(
decision.getExplanation(),
equalTo("node [" + DATA_NODE.getId() + "] is preparing to restart, but will remain in the cluster")
);
}
public void testCanAutoExpandToNodeIfNoNodesShuttingDown() {
RoutingAllocation allocation = createRoutingAllocation(ClusterState.EMPTY_STATE);
Decision decision = decider.shouldAutoExpandToNode(indexMetadata, DATA_NODE, allocation);
assertThat(decision.type(), equalTo(Decision.Type.YES));
assertThat(decision.getExplanation(), equalTo("no nodes are shutting down"));
}
public void testCanAutoExpandToNodeThatIsNotShuttingDown() {
for (SingleNodeShutdownMetadata.Type type : REMOVE_SHUTDOWN_TYPES) {
ClusterState state = prepareState(type, "other-node-id");
RoutingAllocation allocation = createRoutingAllocation(state);
Decision decision = decider.shouldAutoExpandToNode(indexMetadata, DATA_NODE, allocation);
assertThat(type.toString(), decision.type(), equalTo(Decision.Type.YES));
assertThat(type.toString(), decision.getExplanation(), equalTo("this node is not shutting down"));
}
}
public void testCannotAutoExpandToRemovingNode() {
for (SingleNodeShutdownMetadata.Type type : List.of(
SingleNodeShutdownMetadata.Type.REMOVE,
SingleNodeShutdownMetadata.Type.SIGTERM
)) {
ClusterState state = prepareState(type);
RoutingAllocation allocation = createRoutingAllocation(state);
Decision decision = decider.shouldAutoExpandToNode(indexMetadata, DATA_NODE, allocation);
assertThat(decision.type(), equalTo(Decision.Type.NO));
assertThat(decision.getExplanation(), equalTo("node [" + DATA_NODE.getId() + "] is preparing to be removed from the cluster"));
}
}
public void testAutoExpandDuringNodeReplacement() {
var state = ClusterState.builder(ClusterName.DEFAULT)
.nodes(DiscoveryNodes.builder().add(DATA_NODE).build())
.metadata(Metadata.builder().put(IndexMetadata.builder(indexMetadata)))
.build();
// should auto-expand when no shutdown
assertThatDecision(
decider.shouldAutoExpandToNode(indexMetadata, DATA_NODE, createRoutingAllocation(state)),
Decision.Type.YES,
"no nodes are shutting down"
);
// should auto-expand to source when shutdown/replacement entry is registered and node replacement has not started
var shutdown = createNodesShutdownMetadata(SingleNodeShutdownMetadata.Type.REPLACE, DATA_NODE.getId());
state = ClusterState.builder(state)
.metadata(Metadata.builder(state.metadata()).putCustom(NodesShutdownMetadata.TYPE, shutdown).build())
.build();
assertThatDecision(
decider.shouldAutoExpandToNode(indexMetadata, DATA_NODE, createRoutingAllocation(state)),
Decision.Type.YES,
"node [" + DATA_NODE.getId() + "] is preparing to be removed from the cluster, but replacement is not yet present"
);
// should auto-expand to replacement when node replacement has started
var replacementName = shutdown.get(DATA_NODE.getId()).getTargetNodeName();
var replacementNode = newNode(replacementName, "node-data-1", Set.of(DiscoveryNodeRole.DATA_ROLE));
state = ClusterState.builder(state).nodes(DiscoveryNodes.builder(state.getNodes()).add(replacementNode).build()).build();
assertThatDecision(
decider.shouldAutoExpandToNode(indexMetadata, DATA_NODE, createRoutingAllocation(state)),
Decision.Type.NO,
"node [" + DATA_NODE.getId() + "] is preparing to be removed from the cluster"
);
assertThatDecision(
decider.shouldAutoExpandToNode(indexMetadata, replacementNode, createRoutingAllocation(state)),
Decision.Type.YES,
"this node is not shutting down"
);
}
private ClusterState prepareState(SingleNodeShutdownMetadata.Type shutdownType) {
return prepareState(shutdownType, DATA_NODE.getId());
}
private ClusterState prepareState(SingleNodeShutdownMetadata.Type shutdownType, String nodeId) {
return ClusterState.builder(ClusterName.DEFAULT)
.nodes(DiscoveryNodes.builder().add(DATA_NODE).build())
.metadata(
Metadata.builder()
.put(IndexMetadata.builder(indexMetadata))
.putCustom(NodesShutdownMetadata.TYPE, createNodesShutdownMetadata(shutdownType, nodeId))
)
.build();
}
private NodesShutdownMetadata createNodesShutdownMetadata(SingleNodeShutdownMetadata.Type shutdownType, String nodeId) {
final String targetNodeName = shutdownType == SingleNodeShutdownMetadata.Type.REPLACE ? randomAlphaOfLengthBetween(10, 20) : null;
return new NodesShutdownMetadata(new HashMap<>()).putSingleNodeMetadata(
SingleNodeShutdownMetadata.builder()
.setNodeId(nodeId)
.setNodeEphemeralId(nodeId)
.setType(shutdownType)
.setReason(this.getTestName())
.setStartedAtMillis(1L)
.setTargetNodeName(targetNodeName)
.setGracePeriod(shutdownType == SIGTERM ? randomTimeValue() : null)
.build()
);
}
private RoutingAllocation createRoutingAllocation(ClusterState state) {
var allocation = new RoutingAllocation(allocationDeciders, state, null, null, 0);
allocation.debugDecision(true);
return allocation;
}
private static void assertThatDecision(Decision decision, Decision.Type type, String explanation) {
assertThat(decision.type(), equalTo(type));
assertThat(decision.getExplanation(), equalTo(explanation));
}
}
|
NodeShutdownAllocationDeciderTests
|
java
|
quarkusio__quarkus
|
extensions/tls-registry/deployment/src/test/java/io/quarkus/tls/PemCertOrderWithNotEnoughValueTest.java
|
{
"start": 823,
"end": 1810
}
|
class ____ {
private static final String configuration = """
quarkus.tls.key-store.pem.foo.cert=target/certs/test-format.crt
quarkus.tls.key-store.pem.foo.key=target/certs/test-formats.key
quarkus.tls.key-store.pem.bar.cert=target/certs/test-format.crt
quarkus.tls.key-store.pem.bar.key=target/certs/test-formats.key
quarkus.tls.key-store.pem.order=bar
""";
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer(
() -> ShrinkWrap.create(JavaArchive.class)
.add(new StringAsset(configuration), "application.properties"))
.assertException(t -> assertThat(t.getCause().getMessage()).contains("`order`", "keyCerts`"));
@Test
void test() throws KeyStoreException, CertificateParsingException {
fail("Should not be called as the extension should fail before.");
}
}
|
PemCertOrderWithNotEnoughValueTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/InterruptedExceptionSwallowedTest.java
|
{
"start": 8146,
"end": 8681
}
|
class ____ {
void test(Future<?> future) {
try {
future.get();
// BUG: Diagnostic contains:
} catch (Exception e) {
throw new IllegalStateException(e);
}
}
}
""")
.doTest();
}
@Test
public void positiveRefactoring() {
refactoringHelper
.addInputLines(
"Test.java",
"""
import java.util.concurrent.Future;
|
Test
|
java
|
apache__camel
|
components/camel-vertx/camel-vertx-websocket/src/main/java/org/apache/camel/component/vertx/websocket/VertxWebsocketConstants.java
|
{
"start": 996,
"end": 3044
}
|
/**
 * Shared constants for the Vert.x WebSocket component: default host/port
 * values for the server and client sides, plus the Camel message-header
 * names understood by the producer and consumer.
 */
class ____ {

    // Server-side defaults mirror the Vert.x NetServerOptions defaults.
    public static final String DEFAULT_VERTX_SERVER_HOST = NetServerOptions.DEFAULT_HOST;
    public static final int DEFAULT_VERTX_SERVER_PORT = NetServerOptions.DEFAULT_PORT;

    // Client-side defaults: plain ws:// follows the Vert.x HTTP client default
    // port, while wss:// defaults to the standard HTTPS port.
    public static final int DEFAULT_VERTX_CLIENT_WS_PORT = HttpClientOptions.DEFAULT_DEFAULT_PORT;
    public static final int DEFAULT_VERTX_CLIENT_WSS_PORT = 443;

    // Name of the HTTP "Origin" header sent during the WebSocket handshake.
    public static final String ORIGIN_HTTP_HEADER_NAME = "Origin";

    @Metadata(description = "Sends the message to the client with the given connection key. You can\n" +
                            "use a comma separated list of keys to send a message to multiple clients. " +
                            "Note that this option only applies when producing messages to endpoints hosted by the vertx-websocket consumer and not to an externally hosted WebSocket.",
              javaType = "String")
    public static final String CONNECTION_KEY = "CamelVertxWebsocket.connectionKey";

    @Metadata(label = "producer", description = "Sends the message to all clients which are currently connected. You can\n" +
                                                "use the `sendToAll` option on the endpoint instead of using this header. " +
                                                "Note that this option only applies when producing messages to endpoints hosted by the vertx-websocket consumer and not to an externally hosted WebSocket.",
              javaType = "boolean")
    public static final String SEND_TO_ALL = "CamelVertxWebsocket.sendToAll";

    @Metadata(label = "consumer", description = "The remote address.", javaType = "io.vertx.core.net.SocketAddress")
    public static final String REMOTE_ADDRESS = "CamelVertxWebsocket.remoteAddress";

    @Metadata(label = "consumer", description = "The WebSocket event that triggered the message exchange.",
              javaType = "org.apache.camel.component.vertx.websocket.VertxWebsocketEvent")
    public static final String EVENT = "CamelVertxWebsocket.event";

    // Constants holder: not meant to be instantiated.
    private VertxWebsocketConstants() {
    }
}
|
VertxWebsocketConstants
|
java
|
apache__camel
|
components/camel-joor/src/main/java/org/apache/camel/language/joor/CompilationUnit.java
|
{
"start": 2190,
"end": 3198
}
|
class ____
* @return the compiled byte code
*/
public byte[] getByteCode(String className) {
return compiled.get(className);
}
/**
* Number of classes in the result
*/
public int size() {
return classes.size();
}
/**
* Set of the classes by their names
*/
public Set<String> getClassNames() {
return classes.keySet();
}
/**
* Set of the compiled classes by their names
*/
public Set<String> getCompiledClassNames() {
return compiled.keySet();
}
}
static CompilationUnit.Result result() {
return new Result();
}
/**
* Creates a new compilation unit for holding input files.
*/
public static CompilationUnit input() {
return new CompilationUnit();
}
/**
* Adds input to the compilation unit.
*
* @param className the
|
name
|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-server-webmvc/src/main/java/org/springframework/cloud/gateway/server/mvc/filter/XForwardedRequestHeadersFilter.java
|
{
"start": 1423,
"end": 8405
}
|
/**
 * Request header filter that populates the {@code X-Forwarded-*} headers
 * (For/Host/Port/Proto/Prefix) on requests routed through the gateway.
 * Headers are only written when the immediate remote address is accepted by
 * the configured {@link TrustedProxies} check, so untrusted clients cannot
 * forge forwarded values. Each individual header can be enabled/disabled and
 * switched between append and overwrite mode via the properties.
 */
class ____ implements HttpHeadersFilter.RequestHttpHeadersFilter, Ordered {

    private static final Log log = LogFactory.getLog(XForwardedRequestHeadersFilter.class);

    /** Default http port. */
    public static final int HTTP_PORT = 80;

    /** Default https port. */
    public static final int HTTPS_PORT = 443;

    /** Http url scheme. */
    public static final String HTTP_SCHEME = "http";

    /** Https url scheme. */
    public static final String HTTPS_SCHEME = "https";

    /** X-Forwarded-For Header. */
    public static final String X_FORWARDED_FOR_HEADER = "X-Forwarded-For";

    /** X-Forwarded-Host Header. */
    public static final String X_FORWARDED_HOST_HEADER = "X-Forwarded-Host";

    /** X-Forwarded-Port Header. */
    public static final String X_FORWARDED_PORT_HEADER = "X-Forwarded-Port";

    /** X-Forwarded-Proto Header. */
    public static final String X_FORWARDED_PROTO_HEADER = "X-Forwarded-Proto";

    /** X-Forwarded-Prefix Header. */
    public static final String X_FORWARDED_PREFIX_HEADER = "X-Forwarded-Prefix";

    // Per-header enable/append switches.
    private final XForwardedRequestHeadersFilterProperties properties;

    // Decides whether a remote address may contribute forwarded headers.
    private final TrustedProxies trustedProxies;

    /**
     * @deprecated trusts every proxy; prefer the constructor that takes a
     * trusted-proxies pattern so forwarded headers cannot be spoofed.
     */
    @Deprecated
    public XForwardedRequestHeadersFilter(XForwardedRequestHeadersFilterProperties properties) {
        this(properties, s -> true);
        log.warn(GatewayMvcProperties.PREFIX
                + ".trusted-proxies is not set. Using deprecated Constructor. Untrusted hosts might be added to X-Forwarded header.");
    }

    public XForwardedRequestHeadersFilter(XForwardedRequestHeadersFilterProperties props, String trustedProxies) {
        this(props, TrustedProxies.from(trustedProxies));
    }

    private XForwardedRequestHeadersFilter(XForwardedRequestHeadersFilterProperties props,
            TrustedProxies trustedProxies) {
        Objects.requireNonNull(props, "XForwardedRequestHeadersFilterProperties must not be null");
        Objects.requireNonNull(trustedProxies, "trustedProxies must not be null");
        this.properties = props;
        this.trustedProxies = trustedProxies;
    }

    @Override
    public int getOrder() {
        return properties.getOrder();
    }

    /**
     * Returns a copy of {@code input} with the enabled {@code X-Forwarded-*}
     * headers applied, or {@code input} untouched when the remote address is
     * not a trusted proxy.
     */
    @Override
    public HttpHeaders apply(HttpHeaders input, ServerRequest request) {
        // Bail out early for untrusted peers: never mutate headers for them.
        if (request.servletRequest().getRemoteAddr() != null
                && !trustedProxies.isTrusted(request.servletRequest().getRemoteAddr())) {
            log.trace(LogMessage.format("Remote address not trusted. pattern %s remote address %s", trustedProxies,
                    request.servletRequest().getRemoteHost()));
            return input;
        }
        HttpHeaders original = input;
        HttpHeaders updated = new HttpHeaders();
        // Work on a copy so the incoming HttpHeaders instance is never mutated.
        for (Map.Entry<String, List<String>> entry : original.headerSet()) {
            updated.addAll(entry.getKey(), entry.getValue());
        }
        if (properties.isForEnabled()) {
            String remoteAddr = null;
            if (request.servletRequest().getRemoteAddr() != null) {
                remoteAddr = request.servletRequest().getRemoteAddr();
            }
            // In append mode, existing X-Forwarded-For entries are re-filtered
            // through the trusted-proxies predicate as well.
            write(updated, X_FORWARDED_FOR_HEADER, remoteAddr, properties.isForAppend(), trustedProxies::isTrusted);
        }
        String proto = request.uri().getScheme();
        if (properties.isProtoEnabled()) {
            write(updated, X_FORWARDED_PROTO_HEADER, proto, properties.isProtoAppend());
        }
        if (properties.isPrefixEnabled()) {
            // If the path of the url that the gw is routing to is a subset
            // (and ending part) of the url that it is routing from then the difference
            // is the prefix e.g. if request original.com/prefix/get/ is routed
            // to routedservice:8090/get then /prefix is the prefix
            // - see XForwardedHeadersFilterTests, so first get uris, then extract paths
            // and remove one from another if it's the ending part.
            LinkedHashSet<URI> originalUris = MvcUtils.getAttribute(request,
                    MvcUtils.GATEWAY_ORIGINAL_REQUEST_URL_ATTR);
            URI requestUri = request.uri();
            if (originalUris != null && requestUri != null) {
                originalUris.forEach(originalUri -> {
                    if (originalUri != null && originalUri.getPath() != null) {
                        // strip trailing slashes before checking if request path is end
                        // of original path
                        String originalUriPath = stripTrailingSlash(originalUri);
                        String requestUriPath = stripTrailingSlash(requestUri);
                        updateRequest(updated, originalUri, originalUriPath, requestUriPath);
                    }
                });
            }
        }
        if (properties.isPortEnabled()) {
            String port = String.valueOf(request.uri().getPort());
            if (request.uri().getPort() < 0) {
                // URI carries no explicit port; fall back to the scheme default.
                port = String.valueOf(getDefaultPort(proto));
            }
            write(updated, X_FORWARDED_PORT_HEADER, port, properties.isPortAppend());
        }
        if (properties.isHostEnabled()) {
            String host = toHostHeader(request);
            write(updated, X_FORWARDED_HOST_HEADER, host, properties.isHostAppend());
        }
        return updated;
    }

    /**
     * Writes the {@code X-Forwarded-Prefix} header when the request path is a
     * trailing subset of the original path (the difference is the prefix).
     */
    private void updateRequest(HttpHeaders updated, URI originalUri, String originalUriPath,
            @Nullable String requestUriPath) {
        String prefix;
        if (requestUriPath != null && (originalUriPath.endsWith(requestUriPath))) {
            prefix = substringBeforeLast(originalUriPath, requestUriPath);
            // Guard against a degenerate/empty prefix and against a computed
            // prefix longer than the original path.
            if (prefix != null && prefix.length() > 0 && prefix.length() <= originalUri.getPath().length()) {
                write(updated, X_FORWARDED_PREFIX_HEADER, prefix, properties.isPrefixAppend());
            }
        }
    }

    /** Returns {@code str} up to (excluding) the last occurrence of {@code separator}. */
    private static @Nullable String substringBeforeLast(String str, String separator) {
        if (ObjectUtils.isEmpty(str) || ObjectUtils.isEmpty(separator)) {
            return str;
        }
        int pos = str.lastIndexOf(separator);
        if (pos == -1) {
            return str;
        }
        return str.substring(0, pos);
    }

    private void write(HttpHeaders headers, String name, @Nullable String value, boolean append) {
        write(headers, name, value, append, s -> true);
    }

    /**
     * Writes {@code value} under {@code name}. In append mode the value is
     * added and the whole list is collapsed into a single comma-separated
     * header, keeping only entries accepted by {@code shouldWrite}; in
     * overwrite mode the header is replaced when the value passes the predicate.
     */
    private void write(HttpHeaders headers, String name, @Nullable String value, boolean append,
            Predicate<String> shouldWrite) {
        if (append) {
            if (value != null) {
                headers.add(name, value);
            }
            // these headers should be treated as a single comma separated header
            if (headers.containsHeader(name)) {
                List<String> values = headers.get(name);
                if (values != null) {
                    List<String> filteredValues = values.stream().filter(shouldWrite).toList();
                    String delimitedValue = StringUtils.collectionToCommaDelimitedString(filteredValues);
                    headers.set(name, delimitedValue);
                }
            }
        }
        else if (value != null && shouldWrite.test(value)) {
            headers.set(name, value);
        }
    }

    /** Default port for the given scheme: 443 for https, 80 otherwise. */
    private int getDefaultPort(String scheme) {
        return HTTPS_SCHEME.equals(scheme) ? HTTPS_PORT : HTTP_PORT;
    }

    /**
     * Builds the {@code host[:port]} value for X-Forwarded-Host, omitting the
     * port when it is the default for the scheme (or absent).
     */
    private String toHostHeader(ServerRequest request) {
        int port = request.uri().getPort();
        String host = request.uri().getHost();
        String scheme = request.uri().getScheme();
        if (port < 0 || (port == HTTP_PORT && HTTP_SCHEME.equals(scheme))
                || (port == HTTPS_PORT && HTTPS_SCHEME.equals(scheme))) {
            return host;
        }
        else {
            return host + ":" + port;
        }
    }

    /** Returns the URI path without a single trailing slash, if present. */
    private String stripTrailingSlash(URI uri) {
        if (uri.getPath().endsWith("/")) {
            return uri.getPath().substring(0, uri.getPath().length() - 1);
        }
        else {
            return uri.getPath();
        }
    }
}
|
XForwardedRequestHeadersFilter
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/core/fs/SafetyNetCloseableRegistry.java
|
{
"start": 2060,
"end": 5681
}
|
class ____
extends AbstractAutoCloseableRegistry<
Closeable,
WrappingProxyCloseable<? extends Closeable>,
SafetyNetCloseableRegistry.PhantomDelegatingCloseableRef,
IOException> {
private static final Logger LOG = LoggerFactory.getLogger(SafetyNetCloseableRegistry.class);
/** Lock for atomic modifications to reaper thread and registry count. */
private static final Object REAPER_THREAD_LOCK = new Object();
// CHECKSTYLE.OFF: StaticVariableName
/** Singleton reaper thread takes care of all registries in VM. */
private static CloseableReaperThread REAPER_THREAD = null;
/** Global count of all instances of SafetyNetCloseableRegistry. */
private static int GLOBAL_SAFETY_NET_REGISTRY_COUNT = 0;
// CHECKSTYLE.ON: StaticVariableName
SafetyNetCloseableRegistry() {
this(() -> new CloseableReaperThread());
}
@VisibleForTesting
SafetyNetCloseableRegistry(Supplier<CloseableReaperThread> reaperThreadSupplier) {
super(new IdentityHashMap<>());
synchronized (REAPER_THREAD_LOCK) {
if (0 == GLOBAL_SAFETY_NET_REGISTRY_COUNT) {
Preconditions.checkState(null == REAPER_THREAD);
try {
REAPER_THREAD = reaperThreadSupplier.get();
REAPER_THREAD.start();
} catch (Throwable throwable) {
REAPER_THREAD = null;
throw throwable;
}
}
++GLOBAL_SAFETY_NET_REGISTRY_COUNT;
}
}
@Override
protected void doRegister(
@Nonnull WrappingProxyCloseable<? extends Closeable> wrappingProxyCloseable,
@Nonnull Map<Closeable, PhantomDelegatingCloseableRef> closeableMap) {
assert Thread.holdsLock(getSynchronizationLock());
Closeable innerCloseable = WrappingProxyUtil.stripProxy(wrappingProxyCloseable);
if (null == innerCloseable) {
return;
}
PhantomDelegatingCloseableRef phantomRef =
new PhantomDelegatingCloseableRef(
wrappingProxyCloseable, this, REAPER_THREAD.referenceQueue);
closeableMap.put(innerCloseable, phantomRef);
}
@Override
protected boolean doUnRegister(
@Nonnull WrappingProxyCloseable<? extends Closeable> closeable,
@Nonnull Map<Closeable, PhantomDelegatingCloseableRef> closeableMap) {
assert Thread.holdsLock(getSynchronizationLock());
Closeable innerCloseable = WrappingProxyUtil.stripProxy(closeable);
return null != innerCloseable && closeableMap.remove(innerCloseable) != null;
}
/**
* This implementation doesn't imply any exception during closing due to backward compatibility.
*/
@Override
protected void doClose(List<Closeable> toClose) throws IOException {
try {
IOUtils.closeAllQuietly(toClose);
} finally {
synchronized (REAPER_THREAD_LOCK) {
--GLOBAL_SAFETY_NET_REGISTRY_COUNT;
if (0 == GLOBAL_SAFETY_NET_REGISTRY_COUNT) {
REAPER_THREAD.interrupt();
REAPER_THREAD = null;
}
}
}
}
@VisibleForTesting
static boolean isReaperThreadRunning() {
synchronized (REAPER_THREAD_LOCK) {
return null != REAPER_THREAD && REAPER_THREAD.isAlive();
}
}
/** Phantom reference to {@link WrappingProxyCloseable}. */
static final
|
SafetyNetCloseableRegistry
|
java
|
apache__maven
|
impl/maven-core/src/main/java/org/apache/maven/artifact/repository/metadata/io/DefaultMetadataReader.java
|
{
"start": 1385,
"end": 3006
}
|
/**
 * Default {@link MetadataReader} implementation that parses repository
 * metadata XML from a file, reader, or input stream using
 * {@link MetadataStaxReader}. StAX parse failures are translated into
 * {@link MetadataParseException} with line/column location preserved.
 */
class ____ implements MetadataReader {

    @Override
    public Metadata read(File input, Map<String, ?> options) throws IOException {
        Objects.requireNonNull(input, "input cannot be null");
        // Delegate to the stream variant, which closes the stream when done.
        return read(Files.newInputStream(input.toPath()), options);
    }

    @Override
    public Metadata read(Reader input, Map<String, ?> options) throws IOException {
        Objects.requireNonNull(input, "input cannot be null");
        try (Reader in = input) {
            return new Metadata(new MetadataStaxReader().read(in, isStrict(options)));
        } catch (XMLStreamException e) {
            throw toParseException(e);
        }
    }

    @Override
    public Metadata read(InputStream input, Map<String, ?> options) throws IOException {
        Objects.requireNonNull(input, "input cannot be null");
        try (InputStream in = input) {
            return new Metadata(new MetadataStaxReader().read(in, isStrict(options)));
        } catch (XMLStreamException e) {
            throw toParseException(e);
        }
    }

    /**
     * Maps a StAX failure to a {@link MetadataParseException}, keeping the
     * parse location and the original exception as cause.
     * NOTE(review): assumes {@code e.getLocation()} is non-null here, as the
     * original code did — confirm against the StAX reader in use.
     */
    private static MetadataParseException toParseException(XMLStreamException e) {
        return new MetadataParseException(
                e.getMessage(),
                e.getLocation().getLineNumber(),
                e.getLocation().getColumnNumber(),
                e);
    }

    /**
     * Reads the {@code IS_STRICT} option; parsing is strict unless the option
     * is present and evaluates to a non-{@code true} string.
     */
    private boolean isStrict(Map<String, ?> options) {
        Object value = (options != null) ? options.get(IS_STRICT) : null;
        return value == null || Boolean.parseBoolean(value.toString());
    }
}
|
DefaultMetadataReader
|
java
|
quarkusio__quarkus
|
integration-tests/vertx/src/test/java/io/quarkus/it/vertx/JsonReaderTest.java
|
{
"start": 432,
"end": 1725
}
|
/**
 * Integration tests for JSON request-body handling on the Vert.x test
 * endpoints: valid objects/arrays round-trip with 200, empty bodies are
 * rejected with 400, and Jackson-based mapping returns the expected fields.
 */
class ____ {

    // Common path prefix of the JSON body endpoints under test.
    private static final String URL_PREFIX = "/vertx-test/json-bodies/";

    // A well-formed JSON object posted synchronously echoes its content.
    @Test
    public void testJson() {
        String body = new JsonObject().put("Hello", "World").toString();
        given().contentType(ContentType.JSON).body(body)
                .post(URL_PREFIX + "json/sync")
                .then().statusCode(200).body(equalTo("Hello World"));
    }

    // An empty body with a JSON content type must be rejected as a bad request.
    @Test
    public void testEmptyJson() {
        given().contentType(ContentType.JSON).body("")
                .post(URL_PREFIX + "json/sync")
                .then().statusCode(400);
    }

    // A well-formed JSON array posted synchronously echoes its elements.
    @Test
    public void testArray() {
        String body = new JsonArray().add("Hello").add("World").toString();
        given().contentType(ContentType.JSON).body(body)
                .post(URL_PREFIX + "array/sync")
                .then().statusCode(200).body(equalTo("Hello World"));
    }

    // An empty body on the array endpoint is likewise a bad request.
    @Test
    public void testEmptyArray() {
        given().contentType(ContentType.JSON).body("")
                .post(URL_PREFIX + "array/sync")
                .then().statusCode(400);
    }

    // GET endpoint that serializes an object via Jackson mapping.
    @Test
    public void testJsonMappingUsingJackson() {
        get(URL_PREFIX + "json/mapping").then()
                .body("firstName", is("jack"))
                .body("lastName", is("rabbit"));
    }
}
|
JsonReaderTest
|
java
|
apache__kafka
|
streams/src/test/java/org/apache/kafka/streams/internals/metrics/ClientMetricsTest.java
|
{
"start": 1632,
"end": 8574
}
|
/**
 * Unit tests for {@code ClientMetrics}: verifies that each client-level
 * metric/sensor is registered on the {@link StreamsMetricsImpl} with the
 * expected name, description, tags, and recording level.
 *
 * <p>Cleanup: the previously unused private overload
 * {@code setUpAndVerifyImmutableMetric(String, String, int, Runnable)} has
 * been removed — no test in this class called it.
 */
class ____ {

    private static final String APPLICATION_ID = "test-application-id";

    private static final String COMMIT_ID = "test-commit-ID";

    private static final String PROCESS_ID = "test-process-id";

    private static final String VERSION = "test-version";

    // Mocked metrics registry that all registrations are verified against.
    private final StreamsMetricsImpl streamsMetrics = mock(StreamsMetricsImpl.class);

    private final Sensor expectedSensor = mock(Sensor.class);

    private final Map<String, String> tagMap = Collections.singletonMap("hello", "world");

    @Test
    public void shouldAddVersionMetric() {
        final String name = "version";
        final String description = "The version of the Kafka Streams client";
        setUpAndVerifyImmutableMetric(name, description, VERSION, () -> ClientMetrics.addVersionMetric(streamsMetrics));
    }

    @Test
    public void shouldAddCommitIdMetric() {
        final String name = "commit-id";
        final String description = "The version control commit ID of the Kafka Streams client";
        setUpAndVerifyImmutableMetric(name, description, COMMIT_ID, () -> ClientMetrics.addCommitIdMetric(streamsMetrics));
    }

    @Test
    public void shouldAddApplicationIdMetric() {
        final String name = "application-id";
        final String description = "The application ID of the Kafka Streams client";
        final String applicationId = "thisIsAnID";
        setUpAndVerifyImmutableMetric(
            name,
            description,
            applicationId,
            () -> ClientMetrics.addApplicationIdMetric(streamsMetrics, applicationId)
        );
    }

    @Test
    public void shouldAddTopologyDescriptionMetric() {
        final String name = "topology-description";
        final String description = "The description of the topology executed in the Kafka Streams client";
        final String topologyDescription = "thisIsATopologyDescription";
        final Gauge<String> topologyDescriptionProvider = (c, n) -> topologyDescription;
        setUpAndVerifyMutableMetric(
            name,
            description,
            topologyDescriptionProvider,
            () -> ClientMetrics.addTopologyDescriptionMetric(streamsMetrics, topologyDescriptionProvider)
        );
    }

    @Test
    public void shouldAddStateMetric() {
        final String name = "state";
        final String description = "The state of the Kafka Streams client";
        final Gauge<String> stateProvider = (config, now) -> State.RUNNING.name();
        setUpAndVerifyMutableMetric(
            name,
            description,
            stateProvider,
            () -> ClientMetrics.addStateMetric(streamsMetrics, stateProvider)
        );
    }

    @Test
    public void shouldAddAliveStreamThreadsMetric() {
        final String name = "alive-stream-threads";
        final String description = "The current number of alive stream threads that are running or participating in rebalance";
        final Gauge<Integer> valueProvider = (config, now) -> 1;
        setUpAndVerifyMutableMetric(
            name,
            description,
            valueProvider,
            () -> ClientMetrics.addNumAliveStreamThreadMetric(streamsMetrics, valueProvider)
        );
    }

    @Test
    public void shouldAddClientStateTelemetryMetric() {
        final String name = "client-state";
        final String description = "The state of the Kafka Streams client";
        final Gauge<Integer> stateProvider = (config, now) -> State.RUNNING.ordinal();
        // LinkedHashMap: tag order matters for the verified call.
        final Map<String, String> additionalTags = new LinkedHashMap<>();
        additionalTags.put("process-id", PROCESS_ID);
        additionalTags.put("application-id", APPLICATION_ID);
        ClientMetrics.addClientStateTelemetryMetric(PROCESS_ID, APPLICATION_ID, streamsMetrics, stateProvider);
        verify(streamsMetrics).addClientLevelMutableMetric(
            eq(name),
            eq(description),
            eq(additionalTags),
            eq(RecordingLevel.INFO),
            eq(stateProvider)
        );
    }

    @Test
    public void shouldAddRecordingLevelMetric() {
        final String name = "recording-level";
        final String description = "The metrics recording level of the Kafka Streams client";
        final int recordingLevel = 1;
        ClientMetrics.addClientRecordingLevelMetric(PROCESS_ID, streamsMetrics, recordingLevel);
        verify(streamsMetrics).addClientLevelImmutableMetric(
            eq(name),
            eq(description),
            eq(Collections.singletonMap("process-id", PROCESS_ID)),
            eq(RecordingLevel.INFO),
            eq(recordingLevel)
        );
    }

    @Test
    public void shouldGetFailedStreamThreadsSensor() {
        final String name = "failed-stream-threads";
        final String description = "The number of failed stream threads since the start of the Kafka Streams client";
        when(streamsMetrics.clientLevelSensor(name, RecordingLevel.INFO)).thenReturn(expectedSensor);
        when(streamsMetrics.clientLevelTagMap()).thenReturn(tagMap);
        StreamsMetricsImpl.addSumMetricToSensor(
            expectedSensor,
            CLIENT_LEVEL_GROUP,
            tagMap,
            name,
            false,
            description
        );
        final Sensor sensor = ClientMetrics.failedStreamThreadSensor(streamsMetrics);
        assertThat(sensor, is(expectedSensor));
    }

    /**
     * Runs {@code metricAdder} and verifies a mutable client-level metric was
     * registered with the given name, description, and value provider.
     */
    private <K> void setUpAndVerifyMutableMetric(final String name,
                                                 final String description,
                                                 final Gauge<K> valueProvider,
                                                 final Runnable metricAdder) {
        metricAdder.run();
        verify(streamsMetrics).addClientLevelMutableMetric(
            eq(name),
            eq(description),
            eq(RecordingLevel.INFO),
            eq(valueProvider)
        );
    }

    /**
     * Runs {@code metricAdder} and verifies an immutable client-level metric
     * was registered with the given name, description, and value.
     */
    private void setUpAndVerifyImmutableMetric(final String name,
                                               final String description,
                                               final String value,
                                               final Runnable metricAdder) {
        metricAdder.run();
        verify(streamsMetrics).addClientLevelImmutableMetric(
            eq(name),
            eq(description),
            eq(RecordingLevel.INFO),
            eq(value)
        );
    }
}
|
ClientMetricsTest
|
java
|
bumptech__glide
|
library/test/src/test/java/com/bumptech/glide/tests/Util.java
|
{
"start": 4244,
"end": 4640
}
|
/**
 * Mockito {@link Answer} that materializes a real {@link Bitmap} from the
 * (width, height, config) arguments of the stubbed factory invocation.
 */
class ____ implements Answer<Bitmap> {
    @Override
    public Bitmap answer(InvocationOnMock invocation) throws Throwable {
        // Argument order mirrors Bitmap.createBitmap(int, int, Config).
        Object[] args = invocation.getArguments();
        return Bitmap.createBitmap((Integer) args[0], (Integer) args[1], (Bitmap.Config) args[2]);
    }
}
}
|
CreateBitmap
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/inference/InferenceFunction.java
|
{
"start": 594,
"end": 692
}
|
class ____ ESQL functions that use inference endpoints (e.g., TEXT_EMBEDDING).
*/
public abstract
|
for
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/DefaultCharsetTest.java
|
{
"start": 15084,
"end": 15673
}
|
class ____ {
void f() throws Exception {
copyFrom("hello", Charset.defaultCharset());
}
}
""")
.setFixChooser(FixChoosers.SECOND)
.doTest();
}
@Test
public void scannerDefaultCharset() {
refactoringTest()
.addInputLines(
"in/Test.java",
"""
import java.util.Scanner;
import java.io.File;
import java.io.InputStream;
import java.nio.channels.ReadableByteChannel;
import java.nio.file.Path;
|
Test
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/records/tofix/RecordWithJsonIgnoredMethod5184Test.java
|
{
"start": 473,
"end": 977
}
|
class ____
extends DatabindTestUtil
{
record TestData5184(@JsonProperty("test_property") String value) {
@JsonIgnore
public Optional<String> getValue() {
return Optional.ofNullable(value);
}
}
record TestData5184Alternate(@JsonProperty("test_property") String value) {
@JsonIgnore
public Optional<String> optionalValue() {
return Optional.ofNullable(value);
}
}
static final
|
RecordWithJsonIgnoredMethod5184Test
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/search/arguments/AggregateArgs.java
|
{
"start": 19588,
"end": 20016
}
|
/**
 * Pipeline operation emitting a {@code LIMIT <offset> <num>} clause, which
 * restricts the aggregate result set to {@code num} rows starting at
 * {@code offset}.
 */
class ____<K, V> implements PipelineOperation<K, V> {

    final long offset;

    final long num;

    Limit(long startOffset, long rowCount) {
        this.offset = startOffset;
        this.num = rowCount;
    }

    @Override
    public void build(CommandArgs<K, V> args) {
        // Argument order is fixed by the LIMIT syntax: keyword, offset, count.
        args.add(CommandKeyword.LIMIT);
        args.add(offset);
        args.add(num);
    }
}
public static
|
Limit
|
java
|
apache__rocketmq
|
tools/src/main/java/org/apache/rocketmq/tools/command/SubCommandException.java
|
{
"start": 853,
"end": 1350
}
|
/**
 * Checked exception thrown when execution of a tools sub-command fails.
 */
class ____ extends Exception {

    private static final long serialVersionUID = 0L;

    /**
     * @param msg Message.
     */
    public SubCommandException(String msg) {
        super(msg);
    }

    /**
     * Builds the detail message via {@link String#format(String, Object...)}.
     * NOTE(review): a message containing a bare '%' resolves to this varargs
     * overload even with no args and would make String.format throw — confirm
     * callers only pass format-safe strings here.
     *
     * @param format printf-style format string.
     * @param args   arguments substituted into {@code format}.
     */
    public SubCommandException(String format, Object... args) {
        super(String.format(format, args));
    }

    /**
     * @param msg Message.
     * @param cause Cause.
     */
    public SubCommandException(String msg, Throwable cause) {
        super(msg, cause);
    }
}
|
SubCommandException
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/shortarrays/ShortArrays_assertContainsExactlyInAnyOrder_Test.java
|
{
"start": 1850,
"end": 11708
}
|
/**
 * Tests for {@code ShortArrays#assertContainsExactlyInAnyOrder}, covering the
 * standard comparison strategy and a custom (absolute-value) strategy.
 *
 * <p>Fix: two test method names contained the typo {@code does_notaccording}
 * (missing underscore); renamed to {@code does_not_according}.
 */
class ____ extends ShortArraysBaseTest {

    @Test
    void should_pass_if_actual_contains_given_values_exactly_in_any_order() {
        arrays.assertContainsExactlyInAnyOrder(someInfo(), actual, arrayOf(6, 8, 10));
    }

    @Test
    void should_pass_if_actual_and_given_values_are_empty() {
        arrays.assertContainsExactlyInAnyOrder(someInfo(), emptyArray(), emptyArray());
    }

    @Test
    void should_pass_if_actual_contains_given_values_exactly_but_in_different_order() {
        AssertionInfo info = someInfo();
        arrays.assertContainsExactlyInAnyOrder(info, actual, arrayOf(6, 10, 8));
    }

    @Test
    void should_fail_if_arrays_have_different_sizes() {
        assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arrays.assertContainsExactlyInAnyOrder(someInfo(), actual,
                                                                                                                arrayOf(6, 8)));
    }

    @Test
    void should_fail_if_expected_is_empty_and_actual_is_not() {
        assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arrays.assertContainsExactlyInAnyOrder(someInfo(), actual,
                                                                                                                emptyArray()));
    }

    @Test
    void should_throw_error_if_expected_is_null() {
        assertThatNullPointerException().isThrownBy(() -> arrays.assertContainsExactlyInAnyOrder(someInfo(), actual, null))
                                        .withMessage(valuesToLookForIsNull());
    }

    @Test
    void should_fail_if_actual_is_null() {
        assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arrays.assertContainsExactlyInAnyOrder(someInfo(), null,
                                                                                                                arrayOf(8)))
                                                       .withMessage(actualIsNull());
    }

    @Test
    void should_fail_if_actual_does_not_contain_given_values_exactly() {
        AssertionInfo info = someInfo();
        short[] expected = { 6, 8, 20 };
        Throwable error = catchThrowable(() -> arrays.assertContainsExactlyInAnyOrder(info, actual, expected));
        assertThat(error).isInstanceOf(AssertionError.class);
        verify(failures).failure(info,
                                 shouldContainExactlyInAnyOrder(actual, expected, newArrayList((short) 20), newArrayList((short) 10),
                                                                StandardComparisonStrategy.instance()));
    }

    @Test
    void should_fail_if_actual_contains_all_given_values_but_size_differ() {
        AssertionInfo info = someInfo();
        short[] expected = { 6, 8 };
        Throwable error = catchThrowable(() -> arrays.assertContainsExactlyInAnyOrder(info, actual, expected));
        assertThat(error).isInstanceOf(AssertionError.class);
        verify(failures).failure(info,
                                 shouldContainExactlyInAnyOrder(actual, expected, emptyList(), newArrayList((short) 10),
                                                                StandardComparisonStrategy.instance()));
    }

    @Test
    void should_fail_if_actual_contains_duplicates_and_expected_does_not() {
        AssertionInfo info = someInfo();
        actual = arrayOf(1, 2, 3);
        short[] expected = { 1, 2 };
        Throwable error = catchThrowable(() -> arrays.assertContainsExactlyInAnyOrder(info, actual, expected));
        assertThat(error).isInstanceOf(AssertionError.class);
        verify(failures).failure(info,
                                 shouldContainExactlyInAnyOrder(actual, expected, emptyList(), newArrayList((short) 3),
                                                                StandardComparisonStrategy.instance()));
    }

    @Test
    void should_fail_if_expected_contains_duplicates_and_actual_does_not() {
        AssertionInfo info = someInfo();
        actual = arrayOf(1, 2);
        short[] expected = { 1, 2, 3 };
        Throwable error = catchThrowable(() -> arrays.assertContainsExactlyInAnyOrder(info, actual, expected));
        assertThat(error).isInstanceOf(AssertionError.class);
        verify(failures).failure(info,
                                 shouldContainExactlyInAnyOrder(actual, expected, newArrayList((short) 3), emptyList(),
                                                                StandardComparisonStrategy.instance()));
    }

    // ------------------------------------------------------------------------------------------------------------------
    // tests using a custom comparison strategy
    // ------------------------------------------------------------------------------------------------------------------

    @Test
    void should_pass_if_actual_contains_given_values_exactly_in_any_order_according_to_custom_comparison_strategy() {
        arraysWithCustomComparisonStrategy.assertContainsExactlyInAnyOrder(someInfo(), actual, arrayOf(6, -8, 10));
    }

    @Test
    void should_pass_if_actual_contains_given_values_exactly_in_different_order_according_to_custom_comparison_strategy() {
        short[] expected = { -6, 10, 8 };
        arraysWithCustomComparisonStrategy.assertContainsExactlyInAnyOrder(someInfo(), actual, expected);
    }

    @Test
    void should_fail_if_expected_is_empty_and_actual_is_not_whatever_custom_comparison_strategy_is() {
        assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arraysWithCustomComparisonStrategy.assertContainsExactlyInAnyOrder(someInfo(),
                                                                                                                                           actual,
                                                                                                                                           emptyArray()));
    }

    @Test
    void should_throw_error_if_expected_is_null_whatever_custom_comparison_strategy_is() {
        assertThatNullPointerException().isThrownBy(() -> arraysWithCustomComparisonStrategy.assertContainsExactlyInAnyOrder(someInfo(),
                                                                                                                             actual,
                                                                                                                             null))
                                        .withMessage(valuesToLookForIsNull());
    }

    @Test
    void should_fail_if_actual_is_null_whatever_custom_comparison_strategy_is() {
        assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arraysWithCustomComparisonStrategy.assertContainsExactlyInAnyOrder(someInfo(),
                                                                                                                                           null,
                                                                                                                                           arrayOf(-8)))
                                                       .withMessage(actualIsNull());
    }

    @Test
    void should_fail_if_actual_does_not_contain_given_values_exactly_according_to_custom_comparison_strategy() {
        AssertionInfo info = someInfo();
        short[] expected = { 6, -8, 20 };
        Throwable error = catchThrowable(() -> arraysWithCustomComparisonStrategy.assertContainsExactlyInAnyOrder(info, actual,
                                                                                                                  expected));
        assertThat(error).isInstanceOf(AssertionError.class);
        verify(failures).failure(info, shouldContainExactlyInAnyOrder(actual, expected, newArrayList((short) 20),
                                                                      newArrayList((short) 10), absValueComparisonStrategy));
    }

    @Test
    void should_fail_if_actual_contains_all_given_values_but_size_differ_according_to_custom_comparison_strategy() {
        AssertionInfo info = someInfo();
        short[] expected = { 6, 8 };
        Throwable error = catchThrowable(() -> arraysWithCustomComparisonStrategy.assertContainsExactlyInAnyOrder(info, actual,
                                                                                                                  expected));
        assertThat(error).isInstanceOf(AssertionError.class);
        verify(failures).failure(info, shouldContainExactlyInAnyOrder(actual, expected, emptyList(), newArrayList((short) 10),
                                                                      absValueComparisonStrategy));
    }

    @Test
    void should_fail_if_actual_contains_duplicates_and_expected_does_not_according_to_custom_comparison_strategy() {
        AssertionInfo info = someInfo();
        actual = arrayOf(1, 2, 3);
        short[] expected = { 1, 2 };
        Throwable error = catchThrowable(() -> arraysWithCustomComparisonStrategy.assertContainsExactlyInAnyOrder(info, actual,
                                                                                                                  expected));
        assertThat(error).isInstanceOf(AssertionError.class);
        verify(failures).failure(info,
                                 shouldContainExactlyInAnyOrder(actual, expected, emptyList(), newArrayList((short) 3),
                                                                absValueComparisonStrategy));
    }

    @Test
    void should_fail_if_expected_contains_duplicates_and_actual_does_not_according_to_custom_comparison_strategy() {
        AssertionInfo info = someInfo();
        actual = arrayOf(1, 2);
        short[] expected = { 1, 2, 3 };
        Throwable error = catchThrowable(() -> arraysWithCustomComparisonStrategy.assertContainsExactlyInAnyOrder(info, actual,
                                                                                                                  expected));
        assertThat(error).isInstanceOf(AssertionError.class);
        verify(failures).failure(info,
                                 shouldContainExactlyInAnyOrder(actual, expected, newArrayList((short) 3), emptyList(),
                                                                absValueComparisonStrategy));
    }
}
|
ShortArrays_assertContainsExactlyInAnyOrder_Test
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/main/java/io/micronaut/annotation/processing/visitor/AbstractTypeAwareJavaElement.java
|
{
"start": 1103,
"end": 3373
}
|
/**
 * Base element that combines declaration-level annotation metadata with the
 * annotation metadata of the element's type when answering nullability
 * queries, and factors in JSpecify-style null-marked scopes via
 * {@link #hasNullMarked()}.
 */
class ____ extends AbstractJavaElement {

    /**
     * The constructor.
     *
     * @param nativeElement The {@link Element}
     * @param annotationMetadataFactory The annotation metadata factory
     * @param visitorContext The Java visitor context
     */
    AbstractTypeAwareJavaElement(JavaNativeElement nativeElement, ElementAnnotationMetadataFactory annotationMetadataFactory, JavaVisitorContext visitorContext) {
        super(nativeElement, annotationMetadataFactory, visitorContext);
    }

    /**
     * Checks if the element is explicitly marked as null-marked.
     *
     * @return true if the element is marked as null-marked, false otherwise
     */
    protected abstract boolean hasNullMarked();

    /**
     * Retrieves the metadata of annotations associated with the type of the current element.
     *
     * @return the {@link AnnotationMetadata} representing the annotations of the type.
     */
    protected abstract AnnotationMetadata getTypeAnnotationMetadata();

    @Override
    public final boolean isDeclaredNullable() {
        // Declaration annotations take precedence; only consult the type's
        // annotations when the declaration carries no @Nullable stereotype.
        if (getAnnotationMetadata().hasDeclaredStereotype(AnnotationUtil.NULLABLE)) {
            return true;
        }
        return getTypeAnnotationMetadata().hasDeclaredStereotype(AnnotationUtil.NULLABLE);
    }

    @Override
    public final boolean isNullable() {
        if (getAnnotationMetadata().hasStereotype(AnnotationUtil.NULLABLE)) {
            return true;
        }
        return getTypeAnnotationMetadata().hasStereotype(AnnotationUtil.NULLABLE);
    }

    @Override
    public final boolean isNonNull() {
        if (getAnnotationMetadata().hasStereotype(AnnotationUtil.NON_NULL)) {
            return true;
        }
        if (getTypeAnnotationMetadata().hasStereotype(AnnotationUtil.NON_NULL)) {
            return true;
        }
        // A null-marked scope implies non-null unless nullability is declared.
        return hasNullMarked() && !isNullable();
    }

    @Override
    public final boolean isDeclaredNonNull() {
        if (getAnnotationMetadata().hasDeclaredStereotype(AnnotationUtil.NON_NULL)) {
            return true;
        }
        return getTypeAnnotationMetadata().hasDeclaredStereotype(AnnotationUtil.NON_NULL);
    }

    protected final boolean canBeMarkedWithNonNull(ClassElement classElement) {
        // void, primitives, and already-nullable types can never be non-null.
        if (classElement.isVoid() || classElement.isPrimitive() || classElement.isNullable()) {
            return false;
        }
        return hasNullMarked();
    }
}
|
AbstractTypeAwareJavaElement
|
java
|
apache__kafka
|
streams/examples/src/test/java/org/apache/kafka/streams/examples/wordcount/WordCountProcessorTest.java
|
{
"start": 1570,
"end": 3372
}
|
/**
 * Unit test for {@code WordCountProcessorDemo.WordCountProcessor} driven by
 * {@link MockProcessorContext}: feeds one record, fires the scheduled
 * punctuator, and checks the forwarded per-word counts.
 */
class ____ {
    @Test
    public void test() {
        final MockProcessorContext<String, String> mockContext = new MockProcessorContext<>();

        // Create, initialize, and register the in-memory "Counts" state store.
        final KeyValueStore<String, Integer> countsStore =
            Stores.keyValueStoreBuilder(Stores.inMemoryKeyValueStore("Counts"), Serdes.String(), Serdes.Integer())
                  .withLoggingDisabled() // Changelog is not supported by MockProcessorContext.
                  // Caching is disabled by default, but FYI: caching is also not supported by MockProcessorContext.
                  .build();
        countsStore.init(mockContext.getStateStoreContext(), countsStore);

        // Create and initialize the processor under test.
        final Processor<String, String, String, String> wordCountProcessor = new WordCountProcessorDemo.WordCountProcessor();
        wordCountProcessor.init(mockContext);

        // Feed a single record containing mixed whitespace-separated words.
        wordCountProcessor.process(new Record<>("key", "alpha beta\tgamma\n\talpha", 0L));

        // The processor buffers counts in the store: nothing is forwarded yet.
        assertTrue(mockContext.forwarded().isEmpty());

        // Fire the punctuator, which walks the store and forwards its contents.
        mockContext.scheduledPunctuators().get(0).getPunctuator().punctuate(0L);

        // Verify the forwarded counts, one record per distinct word.
        final List<MockProcessorContext.CapturedForward<String, String>> expectedForwards = Arrays.asList(
            new MockProcessorContext.CapturedForward<>(new Record<>("alpha", "2", 0L)),
            new MockProcessorContext.CapturedForward<>(new Record<>("beta", "1", 0L)),
            new MockProcessorContext.CapturedForward<>(new Record<>("gamma", "1", 0L))
        );
        assertThat(mockContext.forwarded(), is(expectedForwards));
    }
}
|
WordCountProcessorTest
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/core/fs/FileSystem.java
|
{
"start": 3484,
"end": 10242
}
|
interface ____
* access to files. This abstraction is used both by Flink's fault tolerance mechanism (storing
* state and recovery data) and by reusable built-in connectors (file sources / sinks).
*
* <p>The purpose of this abstraction is <b>not</b> to give user programs an abstraction with
* extreme flexibility and control across all possible file systems. That mission would be a folly,
* as the differences in characteristics of even the most common file systems are already quite
* large. It is expected that user programs that need specialized functionality of certain file
* systems in their functions, operations, sources, or sinks instantiate the specialized file system
* adapters directly.
*
* <h2>Data Persistence Contract</h2>
*
* <p>The FileSystem's {@link FSDataOutputStream output streams} are used to persistently store
* data, both for results of streaming applications and for fault tolerance and recovery. It is
* therefore crucial that the persistence semantics of these streams are well defined.
*
* <h3>Definition of Persistence Guarantees</h3>
*
* <p>Data written to an output stream is considered persistent, if two requirements are met:
*
* <ol>
* <li><b>Visibility Requirement:</b> It must be guaranteed that all other processes, machines,
* virtual machines, containers, etc. that are able to access the file see the data
* consistently when given the absolute file path. This requirement is similar to the
* <i>close-to-open</i> semantics defined by POSIX, but restricted to the file itself (by its
* absolute path).
* <li><b>Durability Requirement:</b> The file system's specific durability/persistence
* requirements must be met. These are specific to the particular file system. For example the
* {@link LocalFileSystem} does not provide any durability guarantees for crashes of both
* hardware and operating system, while replicated distributed file systems (like HDFS)
* typically guarantee durability in the presence of at most <i>n</i> concurrent node
* failures, where <i>n</i> is the replication factor.
* </ol>
*
* <p>Updates to the file's parent directory (such that the file shows up when listing the directory
* contents) are not required to be complete for the data in the file stream to be considered
* persistent. This relaxation is important for file systems where updates to directory contents are
* only eventually consistent.
*
* <p>The {@link FSDataOutputStream} has to guarantee data persistence for the written bytes once
* the call to {@link FSDataOutputStream#close()} returns.
*
* <h3>Examples</h3>
*
* <h4>Fault-tolerant distributed file systems</h4>
*
* <p>For <b>fault-tolerant distributed file systems</b>, data is considered persistent once it has
* been received and acknowledged by the file system, typically by having been replicated to a
* quorum of machines (<i>durability requirement</i>). In addition the absolute file path must be
* visible to all other machines that will potentially access the file (<i>visibility
* requirement</i>).
*
* <p>Whether data has hit non-volatile storage on the storage nodes depends on the specific
* guarantees of the particular file system.
*
* <p>The metadata updates to the file's parent directory are not required to have reached a
* consistent state. It is permissible that some machines see the file when listing the parent
* directory's contents while others do not, as long as access to the file by its absolute path is
* possible on all nodes.
*
* <h4>Local file systems</h4>
*
* <p>A <b>local file system</b> must support the POSIX <i>close-to-open</i> semantics. Because the
* local file system does not have any fault tolerance guarantees, no further requirements exist.
*
* <p>The above implies specifically that data may still be in the OS cache when considered
* persistent from the local file system's perspective. Crashes that cause the OS cache to lose data
* are considered fatal to the local machine and are not covered by the local file system's
* guarantees as defined by Flink.
*
* <p>That means that computed results, checkpoints, and savepoints that are written only to the
* local filesystem are not guaranteed to be recoverable from the local machine's failure, making
* local file systems unsuitable for production setups.
*
* <h2>Updating File Contents</h2>
*
* <p>Many file systems either do not support overwriting contents of existing files at all, or do
* not support consistent visibility of the updated contents in that case. For that reason, Flink's
* FileSystem does not support appending to existing files, or seeking within output streams so that
* previously written data could be overwritten.
*
* <h2>Overwriting Files</h2>
*
* <p>Overwriting files is in general possible. A file is overwritten by deleting it and creating a
* new file. However, certain filesystems cannot make that change synchronously visible to all
* parties that have access to the file. For example <a
* href="https://aws.amazon.com/documentation/s3/">Amazon S3</a> guarantees only <i>eventual
* consistency</i> in the visibility of the file replacement: Some machines may see the old file,
* some machines may see the new file.
*
* <p>To avoid these consistency issues, the implementations of failure/recovery mechanisms in Flink
* strictly avoid writing to the same file path more than once.
*
* <h2>Thread Safety</h2>
*
* <p>Implementations of {@code FileSystem} must be thread-safe: The same instance of FileSystem is
* frequently shared across multiple threads in Flink and must be able to concurrently create
* input/output streams and list file metadata.
*
* <p>The {@link FSDataInputStream} and {@link FSDataOutputStream} implementations are strictly
* <b>not thread-safe</b>. Instances of the streams should also not be passed between threads in
* between read or write operations, because there are no guarantees about the visibility of
* operations across threads (many operations do not create memory fences).
*
* <h2>Streams Safety Net</h2>
*
* <p>When application code obtains a FileSystem (via {@link FileSystem#get(URI)} or via {@link
* Path#getFileSystem()}), the FileSystem instantiates a safety net for that FileSystem. The safety
* net ensures that all streams created from the FileSystem are closed when the application task
* finishes (or is canceled or failed). That way, the task's threads do not leak connections.
*
* <p>Internal runtime code can explicitly obtain a FileSystem that does not use the safety net via
* {@link FileSystem#getUnguardedFileSystem(URI)}.
*
* @see FSDataInputStream
* @see FSDataOutputStream
*/
@Public
public abstract
|
for
|
java
|
netty__netty
|
codec-http/src/main/java/io/netty/handler/codec/http/websocketx/WebSocket08FrameDecoder.java
|
{
"start": 3156,
"end": 19560
}
|
enum ____ {
READING_FIRST,
READING_SECOND,
READING_SIZE,
MASKING_KEY,
PAYLOAD,
CORRUPT
}
private static final InternalLogger logger = InternalLoggerFactory.getInstance(WebSocket08FrameDecoder.class);
private static final byte OPCODE_CONT = 0x0;
private static final byte OPCODE_TEXT = 0x1;
private static final byte OPCODE_BINARY = 0x2;
private static final byte OPCODE_CLOSE = 0x8;
private static final byte OPCODE_PING = 0x9;
private static final byte OPCODE_PONG = 0xA;
private final WebSocketDecoderConfig config;
private int fragmentedFramesCount;
private boolean frameFinalFlag;
private boolean frameMasked;
private int frameRsv;
private int frameOpcode;
private long framePayloadLength;
private int mask;
private int framePayloadLen1;
private boolean receivedClosingHandshake;
private State state = State.READING_FIRST;
/**
* Constructor
*
* @param expectMaskedFrames
* Web socket servers must set this to true processed incoming masked payload. Client implementations
* must set this to false.
* @param allowExtensions
* Flag to allow reserved extension bits to be used or not
* @param maxFramePayloadLength
* Maximum length of a frame's payload. Setting this to an appropriate value for you application
* helps check for denial of services attacks.
*/
public WebSocket08FrameDecoder(boolean expectMaskedFrames, boolean allowExtensions, int maxFramePayloadLength) {
this(expectMaskedFrames, allowExtensions, maxFramePayloadLength, false);
}
/**
* Constructor
*
* @param expectMaskedFrames
* Web socket servers must set this to true processed incoming masked payload. Client implementations
* must set this to false.
* @param allowExtensions
* Flag to allow reserved extension bits to be used or not
* @param maxFramePayloadLength
* Maximum length of a frame's payload. Setting this to an appropriate value for you application
* helps check for denial of services attacks.
* @param allowMaskMismatch
* When set to true, frames which are not masked properly according to the standard will still be
* accepted.
*/
public WebSocket08FrameDecoder(boolean expectMaskedFrames, boolean allowExtensions, int maxFramePayloadLength,
boolean allowMaskMismatch) {
this(WebSocketDecoderConfig.newBuilder()
.expectMaskedFrames(expectMaskedFrames)
.allowExtensions(allowExtensions)
.maxFramePayloadLength(maxFramePayloadLength)
.allowMaskMismatch(allowMaskMismatch)
.build());
}
/**
* Constructor
*
* @param decoderConfig
* Frames decoder configuration.
*/
public WebSocket08FrameDecoder(WebSocketDecoderConfig decoderConfig) {
this.config = ObjectUtil.checkNotNull(decoderConfig, "decoderConfig");
}
@Override
protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception {
// Discard all data received if closing handshake was received before.
if (receivedClosingHandshake) {
in.skipBytes(actualReadableBytes());
return;
}
switch (state) {
case READING_FIRST:
if (!in.isReadable()) {
return;
}
framePayloadLength = 0;
// FIN, RSV, OPCODE
byte b = in.readByte();
frameFinalFlag = (b & 0x80) != 0;
frameRsv = (b & 0x70) >> 4;
frameOpcode = b & 0x0F;
if (logger.isTraceEnabled()) {
logger.trace("Decoding WebSocket Frame opCode={}", frameOpcode);
}
state = State.READING_SECOND;
case READING_SECOND:
if (!in.isReadable()) {
return;
}
// MASK, PAYLOAD LEN 1
b = in.readByte();
frameMasked = (b & 0x80) != 0;
framePayloadLen1 = b & 0x7F;
if (frameRsv != 0 && !config.allowExtensions()) {
protocolViolation(ctx, in, "RSV != 0 and no extension negotiated, RSV:" + frameRsv);
return;
}
if (!config.allowMaskMismatch() && config.expectMaskedFrames() != frameMasked) {
protocolViolation(ctx, in, "received a frame that is not masked as expected");
return;
}
if (frameOpcode > 7) { // control frame (have MSB in opcode set)
// control frames MUST NOT be fragmented
if (!frameFinalFlag) {
protocolViolation(ctx, in, "fragmented control frame");
return;
}
// control frames MUST have payload 125 octets or less
if (framePayloadLen1 > 125) {
protocolViolation(ctx, in, "control frame with payload length > 125 octets");
return;
}
// check for reserved control frame opcodes
if (!(frameOpcode == OPCODE_CLOSE || frameOpcode == OPCODE_PING
|| frameOpcode == OPCODE_PONG)) {
protocolViolation(ctx, in, "control frame using reserved opcode " + frameOpcode);
return;
}
// close frame : if there is a body, the first two bytes of the
// body MUST be a 2-byte unsigned integer (in network byte
// order) representing a getStatus code
if (frameOpcode == 8 && framePayloadLen1 == 1) {
protocolViolation(ctx, in, "received close control frame with payload len 1");
return;
}
} else { // data frame
// check for reserved data frame opcodes
if (!(frameOpcode == OPCODE_CONT || frameOpcode == OPCODE_TEXT
|| frameOpcode == OPCODE_BINARY)) {
protocolViolation(ctx, in, "data frame using reserved opcode " + frameOpcode);
return;
}
// check opcode vs message fragmentation state 1/2
if (fragmentedFramesCount == 0 && frameOpcode == OPCODE_CONT) {
protocolViolation(ctx, in, "received continuation data frame outside fragmented message");
return;
}
// check opcode vs message fragmentation state 2/2
if (fragmentedFramesCount != 0 && frameOpcode != OPCODE_CONT) {
protocolViolation(ctx, in,
"received non-continuation data frame while inside fragmented message");
return;
}
}
state = State.READING_SIZE;
case READING_SIZE:
// Read frame payload length
if (framePayloadLen1 == 126) {
if (in.readableBytes() < 2) {
return;
}
framePayloadLength = in.readUnsignedShort();
if (framePayloadLength < 126) {
protocolViolation(ctx, in, "invalid data frame length (not using minimal length encoding)");
return;
}
} else if (framePayloadLen1 == 127) {
if (in.readableBytes() < 8) {
return;
}
framePayloadLength = in.readLong();
if (framePayloadLength < 0) {
protocolViolation(ctx, in, "invalid data frame length (negative length)");
return;
}
if (framePayloadLength < 65536) {
protocolViolation(ctx, in, "invalid data frame length (not using minimal length encoding)");
return;
}
} else {
framePayloadLength = framePayloadLen1;
}
if (framePayloadLength > config.maxFramePayloadLength()) {
protocolViolation(ctx, in, WebSocketCloseStatus.MESSAGE_TOO_BIG,
"Max frame length of " + config.maxFramePayloadLength() + " has been exceeded.");
return;
}
if (logger.isTraceEnabled()) {
logger.trace("Decoding WebSocket Frame length={}", framePayloadLength);
}
state = State.MASKING_KEY;
case MASKING_KEY:
if (frameMasked) {
if (in.readableBytes() < 4) {
return;
}
mask = in.readInt();
}
state = State.PAYLOAD;
case PAYLOAD:
if (in.readableBytes() < framePayloadLength) {
return;
}
ByteBuf payloadBuffer = Unpooled.EMPTY_BUFFER;
try {
if (framePayloadLength > 0) {
payloadBuffer = readBytes(ctx.alloc(), in, toFrameLength(framePayloadLength));
}
// Now we have all the data, the next checkpoint must be the next
// frame
state = State.READING_FIRST;
// Unmask data if needed
if (frameMasked & framePayloadLength > 0) {
unmask(payloadBuffer);
}
// Processing ping/pong/close frames because they cannot be
// fragmented
if (frameOpcode == OPCODE_PING) {
out.add(new PingWebSocketFrame(frameFinalFlag, frameRsv, payloadBuffer));
payloadBuffer = null;
return;
}
if (frameOpcode == OPCODE_PONG) {
out.add(new PongWebSocketFrame(frameFinalFlag, frameRsv, payloadBuffer));
payloadBuffer = null;
return;
}
if (frameOpcode == OPCODE_CLOSE) {
receivedClosingHandshake = true;
checkCloseFrameBody(ctx, payloadBuffer);
out.add(new CloseWebSocketFrame(frameFinalFlag, frameRsv, payloadBuffer));
payloadBuffer = null;
return;
}
// Processing for possible fragmented messages for text and binary
// frames
if (frameFinalFlag) {
// Final frame of the sequence. Apparently ping frames are
// allowed in the middle of a fragmented message
fragmentedFramesCount = 0;
} else {
// Increment counter
fragmentedFramesCount++;
}
// Return the frame
if (frameOpcode == OPCODE_TEXT) {
out.add(new TextWebSocketFrame(frameFinalFlag, frameRsv, payloadBuffer));
payloadBuffer = null;
return;
} else if (frameOpcode == OPCODE_BINARY) {
out.add(new BinaryWebSocketFrame(frameFinalFlag, frameRsv, payloadBuffer));
payloadBuffer = null;
return;
} else if (frameOpcode == OPCODE_CONT) {
out.add(new ContinuationWebSocketFrame(frameFinalFlag, frameRsv,
payloadBuffer));
payloadBuffer = null;
return;
} else {
throw new UnsupportedOperationException("Cannot decode web socket frame with opcode: "
+ frameOpcode);
}
} finally {
if (payloadBuffer != null) {
payloadBuffer.release();
}
}
case CORRUPT:
if (in.isReadable()) {
// If we don't keep reading Netty will throw an exception saying
// we can't return null if no bytes read and state not changed.
in.readByte();
}
return;
default:
throw new Error("Shouldn't reach here (state: " + state + ")");
}
}
private void unmask(ByteBuf frame) {
int i = frame.readerIndex();
int end = frame.writerIndex();
ByteOrder order = frame.order();
int intMask = mask;
if (intMask == 0) {
// If the mask is 0 we can just return directly as the XOR operations will just produce the same value.
return;
}
// Avoid sign extension on widening primitive conversion
long longMask = intMask & 0xFFFFFFFFL;
longMask |= longMask << 32;
for (int lim = end - 7; i < lim; i += 8) {
frame.setLong(i, frame.getLong(i) ^ longMask);
}
if (i < end - 3) {
frame.setInt(i, frame.getInt(i) ^ (int) longMask);
i += 4;
}
if (order == ByteOrder.LITTLE_ENDIAN) {
intMask = Integer.reverseBytes(intMask);
}
int maskOffset = 0;
for (; i < end; i++) {
frame.setByte(i, frame.getByte(i) ^ WebSocketUtil.byteAtIndex(intMask, maskOffset++ & 3));
}
}
private void protocolViolation(ChannelHandlerContext ctx, ByteBuf in, String reason) {
protocolViolation(ctx, in, WebSocketCloseStatus.PROTOCOL_ERROR, reason);
}
private void protocolViolation(ChannelHandlerContext ctx, ByteBuf in, WebSocketCloseStatus status, String reason) {
protocolViolation(ctx, in, new CorruptedWebSocketFrameException(status, reason));
}
private void protocolViolation(ChannelHandlerContext ctx, ByteBuf in, CorruptedWebSocketFrameException ex) {
state = State.CORRUPT;
int readableBytes = in.readableBytes();
if (readableBytes > 0) {
// Fix for memory leak, caused by ByteToMessageDecoder#channelRead:
// buffer 'cumulation' is released ONLY when no more readable bytes available.
in.skipBytes(readableBytes);
}
if (ctx.channel().isActive() && config.closeOnProtocolViolation()) {
Object closeMessage;
if (receivedClosingHandshake) {
closeMessage = Unpooled.EMPTY_BUFFER;
} else {
WebSocketCloseStatus closeStatus = ex.closeStatus();
String reasonText = ex.getMessage();
if (reasonText == null) {
reasonText = closeStatus.reasonText();
}
closeMessage = new CloseWebSocketFrame(closeStatus, reasonText);
}
ctx.writeAndFlush(closeMessage).addListener(ChannelFutureListener.CLOSE);
}
throw ex;
}
private static int toFrameLength(long l) {
if (l > Integer.MAX_VALUE) {
throw new TooLongFrameException("frame length exceeds " + Integer.MAX_VALUE + ": " + l);
} else {
return (int) l;
}
}
/** */
protected void checkCloseFrameBody(
ChannelHandlerContext ctx, ByteBuf buffer) {
if (buffer == null || !buffer.isReadable()) {
return;
}
if (buffer.readableBytes() < 2) {
protocolViolation(ctx, buffer, WebSocketCloseStatus.INVALID_PAYLOAD_DATA, "Invalid close frame body");
}
// Must have 2 byte integer within the valid range
int statusCode = buffer.getShort(buffer.readerIndex());
if (!WebSocketCloseStatus.isValidStatusCode(statusCode)) {
protocolViolation(ctx, buffer, "Invalid close frame getStatus code: " + statusCode);
}
// May have UTF-8 message
if (buffer.readableBytes() > 2) {
try {
new Utf8Validator().check(buffer, buffer.readerIndex() + 2, buffer.readableBytes() - 2);
} catch (CorruptedWebSocketFrameException ex) {
protocolViolation(ctx, buffer, ex);
}
}
}
}
|
State
|
java
|
google__dagger
|
javatests/dagger/functional/producers/GenericComponentTest.java
|
{
"start": 1311,
"end": 1423
}
|
class ____ {
@ProductionComponent(modules = {ExecutorModule.class, NongenericModule.class})
|
GenericComponentTest
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/transport/RemoteConnectionInfo.java
|
{
"start": 4538,
"end": 4730
}
|
interface ____ extends ToXContentFragment, Writeable {
boolean isConnected();
String modeName();
RemoteConnectionStrategy.ConnectionStrategy modeType();
}
}
|
ModeInfo
|
java
|
apache__flink
|
flink-models/flink-model-openai/src/main/java/org/apache/flink/model/openai/OpenAIOptions.java
|
{
"start": 1259,
"end": 10981
}
|
class ____ {
// ------------------------------------------------------------------------
// Common Options
// ------------------------------------------------------------------------
@Documentation.Section({Documentation.Sections.MODEL_OPENAI_COMMON})
public static final ConfigOption<String> ENDPOINT =
ConfigOptions.key("endpoint")
.stringType()
.noDefaultValue()
.withDescription(
Description.builder()
.text(
"Full URL of the OpenAI API endpoint, e.g., %s or %s",
code("https://api.openai.com/v1/chat/completions"),
code("https://api.openai.com/v1/embeddings"))
.build());
@Documentation.Section({Documentation.Sections.MODEL_OPENAI_COMMON})
public static final ConfigOption<String> API_KEY =
ConfigOptions.key("api-key")
.stringType()
.noDefaultValue()
.withDescription("OpenAI API key for authentication.");
@Documentation.Section({Documentation.Sections.MODEL_OPENAI_COMMON})
public static final ConfigOption<String> MODEL =
ConfigOptions.key("model")
.stringType()
.noDefaultValue()
.withDescription(
Description.builder()
.text(
"Model name, e.g., %s, %s.",
code("gpt-3.5-turbo"), code("text-embedding-ada-002"))
.build());
@Documentation.Section({Documentation.Sections.MODEL_OPENAI_COMMON})
public static final ConfigOption<Integer> MAX_CONTEXT_SIZE =
ConfigOptions.key("max-context-size")
.intType()
.noDefaultValue()
.withDescription(
"Max number of tokens for context. context-overflow-action would be triggered if this threshold is exceeded.");
@Documentation.Section({Documentation.Sections.MODEL_OPENAI_COMMON})
public static final ConfigOption<ContextOverflowAction> CONTEXT_OVERFLOW_ACTION =
ConfigOptions.key("context-overflow-action")
.enumType(ContextOverflowAction.class)
.defaultValue(ContextOverflowAction.TRUNCATED_TAIL)
.withDescription(
Description.builder()
.text("Action to handle context overflows.")
.build());
@Documentation.Section({Documentation.Sections.MODEL_OPENAI_COMMON})
public static final ConfigOption<AbstractOpenAIModelFunction.ErrorHandlingStrategy>
ERROR_HANDLING_STRATEGY =
ConfigOptions.key("error-handling-strategy")
.enumType(AbstractOpenAIModelFunction.ErrorHandlingStrategy.class)
.defaultValue(AbstractOpenAIModelFunction.ErrorHandlingStrategy.RETRY)
.withDescription("Strategy for handling errors during model requests.");
// The model service enforces rate-limiting constraints, necessitating retry mechanisms in
// most operational scenarios.
@Documentation.Section({Documentation.Sections.MODEL_OPENAI_COMMON})
public static final ConfigOption<Integer> RETRY_NUM =
ConfigOptions.key("retry-num")
.intType()
.defaultValue(100)
.withDescription("Number of retry for OpenAI client requests.");
@Documentation.Section({Documentation.Sections.MODEL_OPENAI_COMMON})
public static final ConfigOption<AbstractOpenAIModelFunction.RetryFallbackStrategy>
RETRY_FALLBACK_STRATEGY =
ConfigOptions.key("retry-fallback-strategy")
.enumType(AbstractOpenAIModelFunction.RetryFallbackStrategy.class)
.defaultValue(
AbstractOpenAIModelFunction.RetryFallbackStrategy.FAILOVER)
.withDescription(
"Fallback strategy to employ if the retry attempts are exhausted."
+ " This strategy is applied when error-handling-strategy is set to retry.");
// ------------------------------------------------------------------------
// Options for Chat Completion Model Functions
// ------------------------------------------------------------------------
@Documentation.Section({Documentation.Sections.MODEL_OPENAI_CHAT})
public static final ConfigOption<String> SYSTEM_PROMPT =
ConfigOptions.key("system-prompt")
.stringType()
.defaultValue("You are a helpful assistant.")
.withDeprecatedKeys("systemPrompt")
.withDescription("The system message of a chat.");
@Documentation.Section({Documentation.Sections.MODEL_OPENAI_CHAT})
public static final ConfigOption<Double> TEMPERATURE =
ConfigOptions.key("temperature")
.doubleType()
.noDefaultValue()
.withDescription(
"Controls the randomness or “creativity” of the output. Typical values are between 0.0 and 1.0.");
@Documentation.Section({Documentation.Sections.MODEL_OPENAI_CHAT})
public static final ConfigOption<Double> TOP_P =
ConfigOptions.key("top-p")
.doubleType()
.noDefaultValue()
.withDeprecatedKeys("topP")
.withDescription(
"The probability cutoff for token selection. Usually, either temperature or topP are specified, but not both.");
@Documentation.Section({Documentation.Sections.MODEL_OPENAI_CHAT})
public static final ConfigOption<String> STOP =
ConfigOptions.key("stop")
.stringType()
.noDefaultValue()
.withDescription(
"A CSV list of strings to pass as stop sequences to the model.");
@Documentation.Section({Documentation.Sections.MODEL_OPENAI_CHAT})
public static final ConfigOption<Long> MAX_TOKENS =
ConfigOptions.key("max-tokens")
.longType()
.noDefaultValue()
.withDeprecatedKeys("maxTokens")
.withDescription(
"The maximum number of tokens that can be generated in the chat completion.");
@Documentation.Section({Documentation.Sections.MODEL_OPENAI_CHAT})
public static final ConfigOption<Double> PRESENCE_PENALTY =
ConfigOptions.key("presence-penalty")
.doubleType()
.noDefaultValue()
.withDescription(
"Number between -2.0 and 2.0."
+ " Positive values penalize new tokens based on whether they appear in the text so far,"
+ " increasing the model's likelihood to talk about new topics.");
@Documentation.Section({Documentation.Sections.MODEL_OPENAI_CHAT})
public static final ConfigOption<Long> N =
ConfigOptions.key("n")
.longType()
.noDefaultValue()
.withDescription(
"How many chat completion choices to generate for each input message."
+ " Note that you will be charged based on the number of generated tokens across all of the choices."
+ " Keep n as 1 to minimize costs.");
@Documentation.Section({Documentation.Sections.MODEL_OPENAI_CHAT})
public static final ConfigOption<Long> SEED =
ConfigOptions.key("seed")
.longType()
.noDefaultValue()
.withDescription(
"If specified, the model platform will make a best effort to sample deterministically,"
+ " such that repeated requests with the same seed and parameters should return the same result."
+ " Determinism is not guaranteed.");
@Documentation.Section({Documentation.Sections.MODEL_OPENAI_CHAT})
public static final ConfigOption<OpenAIChatModelFunction.ChatModelResponseFormat>
RESPONSE_FORMAT =
ConfigOptions.key("response-format")
.enumType(OpenAIChatModelFunction.ChatModelResponseFormat.class)
.noDefaultValue()
.withDescription(
"The format of the response, e.g., 'text' or 'json_object'.");
// ------------------------------------------------------------------------
// Options for Embedding Model Functions
// ------------------------------------------------------------------------
@Documentation.Section({Documentation.Sections.MODEL_OPENAI_EMBEDDING})
public static final ConfigOption<Long> DIMENSION =
ConfigOptions.key("dimension")
.longType()
.noDefaultValue()
.withDescription("The size of the embedding result array.");
}
|
OpenAIOptions
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java
|
{
"start": 3197,
"end": 6795
}
|
class ____ extends FieldMapper.Builder {
final Parameter<Boolean> fixed = Parameter.boolParam("fixed", false, m -> toType(m).fixed, true);
final Parameter<Boolean> fixed2 = Parameter.boolParam("fixed2", false, m -> toType(m).fixed2, false)
.addDeprecatedName("fixed2_old");
final Parameter<String> variable = Parameter.stringParam("variable", true, m -> toType(m).variable, "default").acceptsNull();
final Parameter<StringWrapper> wrapper = new Parameter<>("wrapper", false, () -> new StringWrapper("default"), (n, c, o) -> {
if (o == null) return null;
return new StringWrapper(o.toString());
}, m -> toType(m).wrapper, (b, n, v) -> b.field(n, v.name), v -> "wrapper_" + v.name);
final Parameter<Integer> intValue = Parameter.intParam("int_value", true, m -> toType(m).intValue, 5).addValidator(n -> {
if (n > 50) {
throw new IllegalArgumentException("Value of [n] cannot be greater than 50");
}
}).addValidator(n -> {
if (n < 0) {
throw new IllegalArgumentException("Value of [n] cannot be less than 0");
}
}).setMergeValidator((o, n, c) -> n >= o);
final Parameter<NamedAnalyzer> analyzer = Parameter.analyzerParam(
"analyzer",
false,
m -> toType(m).analyzer,
() -> Lucene.KEYWORD_ANALYZER
);
final Parameter<NamedAnalyzer> searchAnalyzer = Parameter.analyzerParam(
"search_analyzer",
true,
m -> toType(m).searchAnalyzer,
analyzer::getValue
);
final Parameter<Boolean> index = Parameter.boolParam("index", false, m -> toType(m).index, true);
final Parameter<String> required = Parameter.stringParam("required", true, m -> toType(m).required, null).addValidator(value -> {
if (value == null) {
throw new IllegalArgumentException("field [required] must be specified");
}
});
final Parameter<DummyEnumType> enumField = Parameter.enumParam(
"enum_field",
true,
m -> toType(m).enumField,
DummyEnumType.NAME1,
DummyEnumType.class
);
final Parameter<DummyEnumType> restrictedEnumField = Parameter.restrictedEnumParam(
"restricted_enum_field",
true,
m -> toType(m).restrictedEnumField,
DummyEnumType.NAME1,
DummyEnumType.class,
EnumSet.of(DummyEnumType.NAME1, DummyEnumType.NAME2)
);
protected Builder(String name) {
super(name);
// only output search analyzer if different to analyzer
searchAnalyzer.setSerializerCheck(
(id, ic, v) -> Objects.equals(analyzer.getValue().name(), searchAnalyzer.getValue().name()) == false
);
}
@Override
protected Parameter<?>[] getParameters() {
return new Parameter<?>[] {
fixed,
fixed2,
variable,
index,
wrapper,
intValue,
analyzer,
searchAnalyzer,
required,
enumField,
restrictedEnumField };
}
@Override
public FieldMapper build(MapperBuilderContext context) {
return new TestMapper(leafName(), context.buildFullName(leafName()), builderParams(this, context), this);
}
}
public static
|
Builder
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/hashcode/ListHashcodeChangeTest.java
|
{
"start": 1298,
"end": 3429
}
|
class ____ {
private Integer authorId;
@BeforeClassTemplate
public void initData(EntityManagerFactoryScope scope) {
// Revision 1
scope.inTransaction( em -> {
final Author author = new Author();
author.setFirstName( "TestFirstName" );
author.setLastName( "lastName" );
author.addBook( createBook1() );
author.addBook( createBook2() );
em.persist( author );
authorId = author.getId();
} );
// Revision 2
// Removes all books and re-adds original 2 plus one new book
scope.inTransaction( em -> {
final Author author = em.find( Author.class, authorId );
author.removeAllBooks();
author.addBook( createBook1() );
author.addBook( createBook2() );
author.addBook( createBook3() );
em.merge( author );
} );
}
@Test
// tests that Author has 3 books.
public void testAuthorState(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final Author author = em.find( Author.class, authorId );
assertNotNull( author );
assertEquals( 3, author.getBooks().size() );
} );
}
@Test
public void testAuthorLastRevision(EntityManagerFactoryScope scope) {
// tests that Author has 3 books, Book1, Book2, and Book3.
// where Book1 and Book2 were removed and re-added with the addition of Book3.
scope.inEntityManager( entityManager -> {
final var reader = AuditReaderFactory.get( entityManager );
final List<Number> revisions = reader.getRevisions( Author.class, authorId );
final Number lastRevision = revisions.get( revisions.size() - 1 );
final Author author = (Author) reader.createQuery()
.forEntitiesAtRevision( Author.class, lastRevision )
.getSingleResult();
assertNotNull( author );
assertEquals( 3, author.getBooks().size() );
} );
}
private Book createBook1() {
Book book = new Book();
book.setTitle( "Book1" );
return book;
}
private Book createBook2() {
Book book = new Book();
book.setTitle( "Book2" );
return book;
}
private Book createBook3() {
Book book = new Book();
book.setTitle( "Book3" );
return book;
}
@Entity(name = "Author")
@Audited
public static
|
ListHashcodeChangeTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/lazy/notfound/LazyNotFoundOneToOneNonUpdatableNonInsertableTest.java
|
{
"start": 2106,
"end": 2686
}
|
class ____ {
@Id
private Integer id;
@OneToOne(fetch = FetchType.LAZY, cascade = CascadeType.ALL, optional = true)
@NotFound(action = NotFoundAction.IGNORE)
@JoinColumn(
name = "id",
referencedColumnName = "id",
insertable = false,
updatable = false
)
private Lazy lazy;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public Lazy getLazy() {
return lazy;
}
public void setLazy(Lazy lazy) {
this.lazy = lazy;
}
}
@Entity(name = "Lazy")
@Table(name = "LAZY")
public static
|
User
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/main/java/org/springframework/data/jpa/repository/query/QueryEnhancerFactory.java
|
{
"start": 928,
"end": 1845
}
|
interface ____ {
/**
* Returns whether this QueryEnhancerFactory supports the given {@link DeclaredQuery}.
*
* @param query the query to be enhanced and introspected.
* @return {@code true} if this QueryEnhancer supports the given query; {@code false} otherwise.
*/
boolean supports(DeclaredQuery query);
/**
* Creates a new {@link QueryEnhancer} for the given query.
*
* @param query the query to be enhanced and introspected.
* @return the query enhancer to be used.
*/
QueryEnhancer create(QueryProvider query);
/**
* Creates a new {@link QueryEnhancerFactory} for the given {@link DeclaredQuery}.
*
* @param query must not be {@literal null}.
* @return an implementation of {@link QueryEnhancer} that suits the query the most
*/
static QueryEnhancerFactory forQuery(DeclaredQuery query) {
return QueryEnhancerSelector.DEFAULT_SELECTOR.select(query);
}
}
|
QueryEnhancerFactory
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/format/support/DefaultFormattingConversionService.java
|
{
"start": 2133,
"end": 5308
}
|
class ____ extends FormattingConversionService {
private static final boolean JSR_354_PRESENT;
static {
ClassLoader classLoader = DefaultFormattingConversionService.class.getClassLoader();
JSR_354_PRESENT = ClassUtils.isPresent("javax.money.MonetaryAmount", classLoader);
}
/**
* Create a new {@code DefaultFormattingConversionService} with the set of
* {@linkplain DefaultConversionService#addDefaultConverters default converters} and
* {@linkplain #addDefaultFormatters default formatters}.
*/
public DefaultFormattingConversionService() {
this(null, true);
}
/**
* Create a new {@code DefaultFormattingConversionService} with the set of
* {@linkplain DefaultConversionService#addDefaultConverters default converters} and,
* based on the value of {@code registerDefaultFormatters}, the set of
* {@linkplain #addDefaultFormatters default formatters}.
* @param registerDefaultFormatters whether to register default formatters
*/
public DefaultFormattingConversionService(boolean registerDefaultFormatters) {
this(null, registerDefaultFormatters);
}
/**
* Create a new {@code DefaultFormattingConversionService} with the set of
* {@linkplain DefaultConversionService#addDefaultConverters default converters} and,
* based on the value of {@code registerDefaultFormatters}, the set of
* {@linkplain #addDefaultFormatters default formatters}.
* @param embeddedValueResolver delegated to {@link #setEmbeddedValueResolver(StringValueResolver)}
* prior to calling {@link #addDefaultFormatters}.
* @param registerDefaultFormatters whether to register default formatters
*/
public DefaultFormattingConversionService(
@Nullable StringValueResolver embeddedValueResolver, boolean registerDefaultFormatters) {
if (embeddedValueResolver != null) {
setEmbeddedValueResolver(embeddedValueResolver);
}
DefaultConversionService.addDefaultConverters(this);
if (registerDefaultFormatters) {
addDefaultFormatters(this);
}
}
/**
* Add formatters appropriate for most environments: including number formatters,
* JSR-354 Money & Currency formatters, and JSR-310 Date-Time formatters,
* depending on the presence of the corresponding API on the classpath.
* @param formatterRegistry the service to register default formatters with
*/
public static void addDefaultFormatters(FormatterRegistry formatterRegistry) {
// Default handling of number values
formatterRegistry.addFormatterForFieldAnnotation(new NumberFormatAnnotationFormatterFactory());
// Default handling of monetary values
if (JSR_354_PRESENT) {
formatterRegistry.addFormatter(new CurrencyUnitFormatter());
formatterRegistry.addFormatter(new MonetaryAmountFormatter());
formatterRegistry.addFormatterForFieldAnnotation(new Jsr354NumberFormatAnnotationFormatterFactory());
}
// Default handling of date-time values
// just handling JSR-310 specific date and time types
new DateTimeFormatterRegistrar().registerFormatters(formatterRegistry);
// regular DateFormat-based Date, Calendar, Long converters
new DateFormatterRegistrar().registerFormatters(formatterRegistry);
}
}
|
DefaultFormattingConversionService
|
java
|
playframework__playframework
|
core/play-java/src/test/java/play/libs/TimeTest.java
|
{
"start": 278,
"end": 3835
}
|
class ____ {
static final int oneSecond = 1;
static final int oneMinute = 60;
static final int oneHour = oneMinute * 60;
static final int oneDay = oneHour * 24;
static final int thirtyDays = oneDay * 30;
@Test
public void testDefaultTime() {
int result = Time.parseDuration(null);
assertEquals(thirtyDays, result);
}
@Test
public void testSeconds() {
int result1 = Time.parseDuration("1s");
assertEquals(oneSecond, result1);
int result2 = Time.parseDuration("100s");
assertEquals(oneSecond * 100, result2);
try {
Time.parseDuration("1S");
Assert.fail("Should have thrown an IllegalArgumentException");
} catch (IllegalArgumentException iae) {
assertEquals("Invalid duration pattern : " + "1S", iae.getMessage());
}
try {
Time.parseDuration("100S");
Assert.fail("Should have thrown an IllegalArgumentException");
} catch (IllegalArgumentException iae) {
assertEquals("Invalid duration pattern : " + "100S", iae.getMessage());
}
}
@Test
public void testMinutes() {
int result1 = Time.parseDuration("1mn");
assertEquals(oneMinute, result1);
int result2 = Time.parseDuration("100mn");
assertEquals(oneMinute * 100, result2);
int result3 = Time.parseDuration("1min");
assertEquals(oneMinute, result3);
int result4 = Time.parseDuration("100min");
assertEquals(oneMinute * 100, result4);
try {
Time.parseDuration("1MIN");
Assert.fail("Should have thrown an IllegalArgumentException");
} catch (IllegalArgumentException iae) {
assertEquals("Invalid duration pattern : " + "1MIN", iae.getMessage());
}
try {
Time.parseDuration("100MN");
Assert.fail("Should have thrown an IllegalArgumentException");
} catch (IllegalArgumentException iae) {
assertEquals("Invalid duration pattern : " + "100MN", iae.getMessage());
}
try {
Time.parseDuration("100mN");
Assert.fail("Should have thrown an IllegalArgumentException");
} catch (IllegalArgumentException iae) {
assertEquals("Invalid duration pattern : " + "100mN", iae.getMessage());
}
}
@Test
public void testHours() {
int result1 = Time.parseDuration("1h");
assertEquals(oneHour, result1);
int result2 = Time.parseDuration("100h");
assertEquals(oneHour * 100, result2);
try {
Time.parseDuration("1H");
Assert.fail("Should have thrown an IllegalArgumentException");
} catch (IllegalArgumentException iae) {
assertEquals("Invalid duration pattern : " + "1H", iae.getMessage());
}
try {
Time.parseDuration("100H");
Assert.fail("Should have thrown an IllegalArgumentException");
} catch (IllegalArgumentException iae) {
assertEquals("Invalid duration pattern : " + "100H", iae.getMessage());
}
}
@Test
public void testDays() {
int result1 = Time.parseDuration("1d");
assertEquals(oneDay, result1);
int result2 = Time.parseDuration("100d");
assertEquals(oneDay * 100, result2);
try {
Time.parseDuration("1D");
Assert.fail("Should have thrown an IllegalArgumentException");
} catch (IllegalArgumentException iae) {
assertEquals("Invalid duration pattern : " + "1D", iae.getMessage());
}
try {
Time.parseDuration("100D");
Assert.fail("Should have thrown an IllegalArgumentException");
} catch (IllegalArgumentException iae) {
assertEquals("Invalid duration pattern : " + "100D", iae.getMessage());
}
}
}
|
TimeTest
|
java
|
quarkusio__quarkus
|
extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/singlepersistenceunit/SinglePersistenceUnitCdiCacheTest.java
|
{
"start": 647,
"end": 3951
}
|
class ____ {
@RegisterExtension
static QuarkusUnitTest runner = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClass(DefaultEntity.class)
.addClass(TransactionTestUtils.class)
.addAsResource("application.properties"))
.overrideRuntimeConfigKey("quarkus.hibernate-orm.second-level-caching-enabled", "true");
@Inject
Cache jakartaCache;
@Inject
org.hibernate.Cache hibernateCache;
@Inject
EntityManager em;
@Inject
UserTransaction tx;
@Test
public void testJakartaCacheOperations() {
DefaultEntity entity = new DefaultEntity("test");
TransactionTestUtils.inTransaction(tx, () -> {
em.persist(entity);
em.flush();
});
TransactionTestUtils.inTransaction(tx, () -> {
DefaultEntity loaded = em.find(DefaultEntity.class, entity.getId());
assertNotNull(loaded, "Entity should be loaded");
// Verify entity is in cache
assertTrue(jakartaCache.contains(DefaultEntity.class, entity.getId()),
"Entity should be in cache after load");
// Test cache operations
DefaultEntity fromCache = em.find(DefaultEntity.class, entity.getId());
assertNotNull(fromCache, "Entity should be retrieved from cache");
assertEquals("test", fromCache.getName(), "Entity data should match");
jakartaCache.evict(DefaultEntity.class, entity.getId());
assertFalse(jakartaCache.contains(DefaultEntity.class, entity.getId()),
"Entity should not be in cache after eviction");
DefaultEntity fromDatabase = em.find(DefaultEntity.class, entity.getId());
assertNotNull(fromDatabase, "Entity should be retrievable from database after cache eviction");
});
}
@Test
public void testHibernateCacheOperations() {
DefaultEntity entity = new DefaultEntity("test");
TransactionTestUtils.inTransaction(tx, () -> {
em.persist(entity);
em.flush();
});
TransactionTestUtils.inTransaction(tx, () -> {
DefaultEntity loaded = em.find(DefaultEntity.class, entity.getId());
assertNotNull(loaded, "Entity should be loaded");
// Verify entity is in cache
assertTrue(hibernateCache.contains(DefaultEntity.class, entity.getId()),
"Entity should be in cache after load");
// Test cache operations
DefaultEntity fromCache = em.find(DefaultEntity.class, entity.getId());
assertNotNull(fromCache, "Entity should be retrieved from cache");
assertEquals("test", fromCache.getName(), "Entity data should match");
hibernateCache.evict(DefaultEntity.class, entity.getId());
assertFalse(hibernateCache.contains(DefaultEntity.class, entity.getId()),
"Entity should not be in cache after eviction");
DefaultEntity fromDatabase = em.find(DefaultEntity.class, entity.getId());
assertNotNull(fromDatabase, "Entity should be retrievable from database after cache eviction");
});
}
}
|
SinglePersistenceUnitCdiCacheTest
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/select/MySqlSelectTest_248.java
|
{
"start": 848,
"end": 1191
}
|
class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "select top.* from t1 top";
SQLSelectStatement stmt = (SQLSelectStatement) SQLUtils.parseSingleStatement(sql, DbType.mysql);
assertEquals("SELECT top.*\n" +
"FROM t1 top", stmt.toString());
}
}
|
MySqlSelectTest_248
|
java
|
elastic__elasticsearch
|
x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/plain/CartesianPointIndexFieldData.java
|
{
"start": 1194,
"end": 3154
}
|
class ____ extends AbstractPointIndexFieldData<CartesianPointValuesSource.MultiCartesianPointValues>
implements
IndexCartesianPointFieldData {
public CartesianPointIndexFieldData(
String fieldName,
ValuesSourceType valuesSourceType,
ToScriptFieldFactory<CartesianPointValuesSource.MultiCartesianPointValues> toScriptFieldFactory
) {
super(fieldName, valuesSourceType, toScriptFieldFactory);
}
@Override
public LeafPointFieldData<CartesianPointValuesSource.MultiCartesianPointValues> load(LeafReaderContext context) {
LeafReader reader = context.reader();
FieldInfo info = reader.getFieldInfos().fieldInfo(fieldName);
if (info != null) {
checkCompatible(info);
}
return new CartesianPointDVLeafFieldData(reader, fieldName, toScriptFieldFactory);
}
@Override
public LeafPointFieldData<CartesianPointValuesSource.MultiCartesianPointValues> loadDirect(LeafReaderContext context) {
return load(context);
}
/** helper: checks a fieldinfo and throws exception if it's definitely not a LatLonDocValuesField */
static void checkCompatible(FieldInfo fieldInfo) {
// dv properties could be "unset", if you e.g. used only StoredField with this same name in the segment.
if (fieldInfo.getDocValuesType() != DocValuesType.NONE
&& fieldInfo.getDocValuesType() != LatLonDocValuesField.TYPE.docValuesType()) {
throw new IllegalArgumentException(
"field=\""
+ fieldInfo.name
+ "\" was indexed with docValuesType="
+ fieldInfo.getDocValuesType()
+ " but this type has docValuesType="
+ LatLonDocValuesField.TYPE.docValuesType()
+ ", is the field really a LatLonDocValuesField?"
);
}
}
public static
|
CartesianPointIndexFieldData
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/stream/StreamExecDropUpdateBefore.java
|
{
"start": 2666,
"end": 4782
}
|
class ____ extends ExecNodeBase<RowData>
implements StreamExecNode<RowData>, SingleTransformationTranslator<RowData> {
public static final String DROP_UPDATE_BEFORE_TRANSFORMATION = "drop-update-before";
public StreamExecDropUpdateBefore(
ReadableConfig tableConfig,
InputProperty inputProperty,
RowType outputType,
String description) {
this(
ExecNodeContext.newNodeId(),
ExecNodeContext.newContext(StreamExecDropUpdateBefore.class),
ExecNodeContext.newPersistedConfig(StreamExecDropUpdateBefore.class, tableConfig),
Collections.singletonList(inputProperty),
outputType,
description);
}
@JsonCreator
public StreamExecDropUpdateBefore(
@JsonProperty(FIELD_NAME_ID) int id,
@JsonProperty(FIELD_NAME_TYPE) ExecNodeContext context,
@JsonProperty(FIELD_NAME_CONFIGURATION) ReadableConfig persistedConfig,
@JsonProperty(FIELD_NAME_INPUT_PROPERTIES) List<InputProperty> inputProperties,
@JsonProperty(FIELD_NAME_OUTPUT_TYPE) RowType outputType,
@JsonProperty(FIELD_NAME_DESCRIPTION) String description) {
super(id, context, persistedConfig, inputProperties, outputType, description);
}
@SuppressWarnings("unchecked")
@Override
protected Transformation<RowData> translateToPlanInternal(
PlannerBase planner, ExecNodeConfig config) {
final Transformation<RowData> inputTransform =
(Transformation<RowData>) getInputEdges().get(0).translateToPlan(planner);
final StreamFilter<RowData> operator = new StreamFilter<>(new DropUpdateBeforeFunction());
return ExecNodeUtil.createOneInputTransformation(
inputTransform,
createTransformationMeta(DROP_UPDATE_BEFORE_TRANSFORMATION, config),
operator,
inputTransform.getOutputType(),
inputTransform.getParallelism(),
false);
}
}
|
StreamExecDropUpdateBefore
|
java
|
quarkusio__quarkus
|
extensions/redis-client/runtime/src/main/java/io/quarkus/redis/runtime/datasource/ReactiveTransactionalBloomCommandsImpl.java
|
{
"start": 405,
"end": 2691
}
|
class ____<K, V> extends AbstractTransactionalCommands
implements ReactiveTransactionalBloomCommands<K, V> {
private final ReactiveBloomCommandsImpl<K, V> reactive;
public ReactiveTransactionalBloomCommandsImpl(ReactiveTransactionalRedisDataSource ds,
ReactiveBloomCommandsImpl<K, V> reactive, TransactionHolder tx) {
super(ds, tx);
this.reactive = reactive;
}
@Override
public Uni<Void> bfadd(K key, V value) {
this.tx.enqueue(Response::toBoolean);
return this.reactive._bfadd(key, value)
.invoke(this::queuedOrDiscard)
.replaceWithVoid();
}
@Override
public Uni<Void> bfexists(K key, V value) {
this.tx.enqueue(Response::toBoolean);
return this.reactive._bfexists(key, value)
.invoke(this::queuedOrDiscard)
.replaceWithVoid();
}
@Override
public Uni<Void> bfmadd(K key, V... values) {
this.tx.enqueue(ReactiveBloomCommandsImpl::decodeAsListOfBooleans);
return this.reactive._bfmadd(key, values)
.invoke(this::queuedOrDiscard)
.replaceWithVoid();
}
@Override
public Uni<Void> bfmexists(K key, V... values) {
this.tx.enqueue(ReactiveBloomCommandsImpl::decodeAsListOfBooleans);
return this.reactive._bfmexists(key, values)
.invoke(this::queuedOrDiscard)
.replaceWithVoid();
}
@Override
public Uni<Void> bfreserve(K key, double errorRate, long capacity) {
return bfreserve(key, errorRate, capacity, new BfReserveArgs());
}
@Override
public Uni<Void> bfreserve(K key, double errorRate, long capacity, BfReserveArgs args) {
this.tx.enqueue(r -> null);
return this.reactive._bfreserve(key, errorRate, capacity, args)
.invoke(this::queuedOrDiscard)
.replaceWithVoid();
}
@Override
public Uni<Void> bfinsert(K key, BfInsertArgs args, V... values) {
this.tx.enqueue(ReactiveBloomCommandsImpl::decodeAsListOfBooleans);
return this.reactive._bfinsert(key, args, values)
.invoke(this::queuedOrDiscard)
.replaceWithVoid();
}
}
|
ReactiveTransactionalBloomCommandsImpl
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/groovy/GroovyBeanDefinitionReaderTests.java
|
{
"start": 30634,
"end": 30787
}
|
interface ____ {
void birthday();
int getAge();
void setAge(int age);
String getName();
void setName(String name);
}
@Component("person")
|
AdvisedPerson
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/main/java/org/springframework/data/jpa/repository/query/JpqlQueryBuilder.java
|
{
"start": 21622,
"end": 22858
}
|
class ____ {
public static final RenderContext EMPTY = new RenderContext(Collections.emptyMap()) {
@Override
public String getAlias(Origin source) {
return "";
}
};
private final Map<Origin, String> aliases;
private int counter;
RenderContext(Map<Origin, String> aliases) {
this.aliases = aliases;
}
/**
* Obtain an alias for {@link Origin}. Unknown selection origins are associated with the enclosing statement if they
* are used for the first time.
*
* @param source
* @return
*/
public String getAlias(Origin source) {
return aliases.computeIfAbsent(source, it -> JpqlQueryBuilder.getAlias(source.getName(),
s -> !aliases.containsValue(s), () -> "join_" + (counter++)));
}
/**
* Prefix {@code fragment} with the alias for {@link Origin}. Unknown selection origins are associated with the
* enclosing statement if they are used for the first time.
*
* @param source
* @return
*/
public String prefixWithAlias(Origin source, String fragment) {
String alias = getAlias(source);
return ObjectUtils.isEmpty(source) ? fragment : alias + "." + fragment;
}
public boolean isConstructorContext() {
return false;
}
}
static
|
RenderContext
|
java
|
spring-projects__spring-boot
|
module/spring-boot-session/src/main/java/org/springframework/boot/session/autoconfigure/SessionProperties.java
|
{
"start": 1361,
"end": 2316
}
|
class ____ {
/**
* Session timeout. If a duration suffix is not specified, seconds will be used.
*/
@DurationUnit(ChronoUnit.SECONDS)
private @Nullable Duration timeout;
private Servlet servlet = new Servlet();
public @Nullable Duration getTimeout() {
return this.timeout;
}
public void setTimeout(@Nullable Duration timeout) {
this.timeout = timeout;
}
public Servlet getServlet() {
return this.servlet;
}
public void setServlet(Servlet servlet) {
this.servlet = servlet;
}
/**
* Determine the session timeout. If no timeout is configured, the
* {@code fallbackTimeout} is used.
* @param fallbackTimeout a fallback timeout value if the timeout isn't configured
* @return the session timeout
*/
public Duration determineTimeout(Supplier<Duration> fallbackTimeout) {
return (this.timeout != null) ? this.timeout : fallbackTimeout.get();
}
/**
* Servlet-related properties.
*/
public static
|
SessionProperties
|
java
|
resilience4j__resilience4j
|
resilience4j-spring-boot2/src/main/java/io/github/resilience4j/retry/autoconfigure/RetryAutoConfiguration.java
|
{
"start": 2032,
"end": 2530
}
|
class ____ {
@Bean
@ConditionalOnAvailableEndpoint
public RetryEndpoint retryEndpoint(RetryRegistry retryRegistry) {
return new RetryEndpoint(retryRegistry);
}
@Bean
@ConditionalOnAvailableEndpoint
public RetryEventsEndpoint retryEventsEndpoint(
EventConsumerRegistry<RetryEvent> eventConsumerRegistry) {
return new RetryEventsEndpoint(eventConsumerRegistry);
}
}
}
|
RetryAutoEndpointConfiguration
|
java
|
spring-projects__spring-security
|
cas/src/test/java/org/springframework/security/cas/web/CasGatewayAuthenticationRedirectFilterTests.java
|
{
"start": 1537,
"end": 3864
}
|
class ____ {
private static final String CAS_LOGIN_URL = "http://mycasserver/login";
CasGatewayAuthenticationRedirectFilter filter = new CasGatewayAuthenticationRedirectFilter(CAS_LOGIN_URL,
serviceProperties());
@Test
void doFilterWhenMatchesThenSavesRequestAndSavesAttributeAndSendRedirect() throws IOException, ServletException {
RequestCache requestCache = mock();
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
this.filter.setRequestMatcher((req) -> true);
this.filter.setRequestCache(requestCache);
this.filter.doFilter(request, response, new MockFilterChain());
assertThat(response.getStatus()).isEqualTo(HttpStatus.FOUND.value());
assertThat(response.getHeader("Location"))
.isEqualTo("http://mycasserver/login?service=http%3A%2F%2Flocalhost%2Flogin%2Fcas&gateway=true");
verify(requestCache).saveRequest(request, response);
}
@Test
void doFilterWhenNotMatchThenContinueFilter() throws ServletException, IOException {
this.filter.setRequestMatcher((req) -> false);
FilterChain chain = mock();
MockHttpServletResponse response = mock();
this.filter.doFilter(new MockHttpServletRequest(), response, chain);
verify(chain).doFilter(any(), any());
verifyNoInteractions(response);
}
@Test
void doFilterWhenSendRenewTrueThenIgnores() throws ServletException, IOException {
ServiceProperties serviceProperties = serviceProperties();
serviceProperties.setSendRenew(true);
this.filter = new CasGatewayAuthenticationRedirectFilter(CAS_LOGIN_URL, serviceProperties);
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
this.filter.setRequestMatcher((req) -> true);
this.filter.doFilter(request, response, new MockFilterChain());
assertThat(response.getStatus()).isEqualTo(HttpStatus.FOUND.value());
assertThat(response.getHeader("Location"))
.isEqualTo("http://mycasserver/login?service=http%3A%2F%2Flocalhost%2Flogin%2Fcas&gateway=true");
}
private static ServiceProperties serviceProperties() {
ServiceProperties serviceProperties = new ServiceProperties();
serviceProperties.setService("http://localhost/login/cas");
return serviceProperties;
}
}
|
CasGatewayAuthenticationRedirectFilterTests
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/issue_1200/Issue1265.java
|
{
"start": 245,
"end": 797
}
|
class ____ extends TestCase {
public void test_0() throws Exception {
Object t = JSON.parseObject("{\"value\":{\"id\":123}}", new TypeReference<Response>(){}).value;
assertEquals(123, ((JSONObject) t).getIntValue("id"));
T1 t1 = JSON.parseObject("{\"value\":{\"id\":123}}", new TypeReference<Response<T1>>(){}).value;
assertEquals(123, t1.id);
T2 t2 = JSON.parseObject("{\"value\":{\"id\":123}}", new TypeReference<Response<T2>>(){}).value;
assertEquals(123, t2.id);
}
public static
|
Issue1265
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/AggregationOperation.java
|
{
"start": 2117,
"end": 2410
}
|
enum ____ represents that string
*/
public static AggregationOperation getAggregationOperation(String aggOpStr) {
for (AggregationOperation aggOp : AggregationOperation.values()) {
if (aggOp.name().equals(aggOpStr)) {
return aggOp;
}
}
return null;
}
}
|
that
|
java
|
apache__flink
|
flink-rpc/flink-rpc-akka/src/test/java/org/apache/flink/runtime/rpc/pekko/PekkoRpcActorTest.java
|
{
"start": 35751,
"end": 36083
}
|
interface ____ extends RpcGateway {
@Local
void schedule(
final CompletableFuture<Void> scheduleRunnableFuture,
final CompletableFuture<Void> scheduleCallableFuture,
final CompletableFuture<Void> executeFuture);
}
private static final
|
SchedulingRpcEndpointGateway
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractCreate.java
|
{
"start": 1992,
"end": 4764
}
|
class ____ extends AbstractContractCreateTest {
/**
* This test suite is parameterized for the different create file
* options.
* @return a list of test parameters.
*/
public static Collection<Object[]> params() {
return Arrays.asList(new Object[][]{
{false, false},
{true, true}
});
}
/**
* Is this test run in create performance mode?
*/
private final boolean createPerformance;
/**
* Expect a 100-continue response?
*/
private final boolean expectContinue;
public ITestS3AContractCreate(final boolean createPerformance,
final boolean expectContinue) {
this.createPerformance = createPerformance;
this.expectContinue = expectContinue;
}
@Override
protected AbstractFSContract createContract(Configuration conf) {
return new S3AContract(conf);
}
@Override
protected Configuration createConfiguration() {
final Configuration conf = setPerformanceFlags(
super.createConfiguration(),
createPerformance ? "create" : "");
removeBaseAndBucketOverrides(
conf,
CONNECTION_EXPECT_CONTINUE);
conf.setBoolean(CONNECTION_EXPECT_CONTINUE, expectContinue);
if (createPerformance) {
skipIfNotEnabled(conf, KEY_PERFORMANCE_TESTS_ENABLED, "Skipping tests running in performance mode");
}
S3ATestUtils.disableFilesystemCaching(conf);
return conf;
}
@Test
public void testOverwriteNonEmptyDirectory() throws Throwable {
try {
super.testOverwriteNonEmptyDirectory();
failWithCreatePerformance();
} catch (AssertionError e) {
swallowWithCreatePerformance(e);
}
}
@Override
@Test
public void testOverwriteEmptyDirectory() throws Throwable {
try {
super.testOverwriteEmptyDirectory();
failWithCreatePerformance();
} catch (AssertionError e) {
swallowWithCreatePerformance(e);
}
}
@Test
@Override
public void testCreateFileOverExistingFileNoOverwrite() throws Throwable {
try {
super.testCreateFileOverExistingFileNoOverwrite();
failWithCreatePerformance();
} catch (AssertionError e) {
swallowWithCreatePerformance(e);
}
}
private void failWithCreatePerformance() {
if (createPerformance) {
fail("expected an assertion error in create performance mode");
}
}
/**
* Swallow an assertion error if the create performance flag is set.
* @param e assertion error
*/
private void swallowWithCreatePerformance(final AssertionError e) {
// this is expected in create performance modea
if (!createPerformance) {
// but if the create performance flag is set, then it is supported
// and the assertion error is unexpected
throw e;
}
}
}
|
ITestS3AContractCreate
|
java
|
spring-projects__spring-framework
|
spring-jms/src/test/java/org/springframework/jms/config/JmsNamespaceHandlerTests.java
|
{
"start": 19867,
"end": 19980
}
|
class ____ implements ErrorHandler {
@Override
public void handleError(Throwable t) {
}
}
}
|
TestErrorHandler
|
java
|
quarkusio__quarkus
|
extensions/resteasy-classic/rest-client-config/runtime/src/main/java/io/quarkus/restclient/config/RestClientsBuildTimeConfig.java
|
{
"start": 1228,
"end": 1342
}
|
interface ____ not registered as a bean unless it is annotated with
* {@link RegisterRestClient}.
* If an
|
is
|
java
|
quarkusio__quarkus
|
extensions/redis-client/runtime/src/main/java/io/quarkus/redis/datasource/RedisDataSource.java
|
{
"start": 26438,
"end": 26942
}
|
class ____ the values included in the message exchanged on the streams
* @param <K> the type of the redis key
* @param <F> the type of the fields (map's keys)
* @param <V> the type of the value
* @return the object to execute commands manipulating streams.
*/
<K, F, V> StreamCommands<K, F, V> stream(Class<K> redisKeyType, Class<F> fieldType, Class<V> valueType);
/**
* Gets the object to execute commands manipulating streams.
*
* @param redisKeyType the
|
of
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/state/internals/WindowKeySchema.java
|
{
"start": 1413,
"end": 10405
}
|
class ____ implements RocksDBSegmentedBytesStore.KeySchema {
private static final Logger LOG = LoggerFactory.getLogger(WindowKeySchema.class);
private static final int SEQNUM_SIZE = 4;
private static final int SUFFIX_SIZE = TIMESTAMP_SIZE + SEQNUM_SIZE;
private static final byte[] MIN_SUFFIX = new byte[SUFFIX_SIZE];
@Override
public Bytes upperRange(final Bytes key, final long to) {
if (key == null) {
return null;
}
final byte[] maxSuffix = ByteBuffer.allocate(SUFFIX_SIZE)
.putLong(to)
.putInt(Integer.MAX_VALUE)
.array();
return OrderedBytes.upperRange(key, maxSuffix);
}
@Override
public Bytes lowerRange(final Bytes key, final long from) {
if (key == null) {
return null;
}
return OrderedBytes.lowerRange(key, MIN_SUFFIX);
}
@Override
public Bytes lowerRangeFixedSize(final Bytes key, final long from) {
return WindowKeySchema.toStoreKeyBinary(key, Math.max(0, from), 0);
}
@Override
public Bytes upperRangeFixedSize(final Bytes key, final long to) {
return WindowKeySchema.toStoreKeyBinary(key, to, Integer.MAX_VALUE);
}
@Override
public long segmentTimestamp(final Bytes key) {
return WindowKeySchema.extractStoreTimestamp(key.get());
}
@Override
public HasNextCondition hasNextCondition(final Bytes binaryKeyFrom,
final Bytes binaryKeyTo,
final long from,
final long to,
final boolean forward) {
return iterator -> {
while (iterator.hasNext()) {
final Bytes bytes = iterator.peekNextKey();
final Bytes keyBytes = Bytes.wrap(WindowKeySchema.extractStoreKeyBytes(bytes.get()));
final long time = WindowKeySchema.extractStoreTimestamp(bytes.get());
if ((binaryKeyFrom == null || keyBytes.compareTo(binaryKeyFrom) >= 0)
&& (binaryKeyTo == null || keyBytes.compareTo(binaryKeyTo) <= 0)
&& time >= from
&& time <= to) {
return true;
}
iterator.next();
}
return false;
};
}
@Override
public <S extends Segment> List<S> segmentsToSearch(final Segments<S> segments,
final long from,
final long to,
final boolean forward) {
return segments.segments(from, to, forward);
}
/**
* Safely construct a time window of the given size,
* taking care of bounding endMs to Long.MAX_VALUE if necessary
*/
static TimeWindow timeWindowForSize(final long startMs,
final long windowSize) {
long endMs = startMs + windowSize;
if (endMs < 0) {
LOG.warn("Warning: window end time was truncated to Long.MAX");
endMs = Long.MAX_VALUE;
}
return new TimeWindow(startMs, endMs);
}
// for pipe serdes
public static <K> byte[] toBinary(final Windowed<K> timeKey,
final Serializer<K> serializer,
final String topic) {
final byte[] bytes = serializer.serialize(topic, timeKey.key());
final ByteBuffer buf = ByteBuffer.allocate(bytes.length + TIMESTAMP_SIZE);
buf.put(bytes);
buf.putLong(timeKey.window().start());
return buf.array();
}
public static <K> Windowed<K> from(final byte[] binaryKey,
final long windowSize,
final Deserializer<K> deserializer,
final String topic) {
final byte[] bytes = new byte[binaryKey.length - TIMESTAMP_SIZE];
System.arraycopy(binaryKey, 0, bytes, 0, bytes.length);
final K key = deserializer.deserialize(topic, bytes);
final Window window = extractWindow(binaryKey, windowSize);
return new Windowed<>(key, window);
}
private static Window extractWindow(final byte[] binaryKey,
final long windowSize) {
final ByteBuffer buffer = ByteBuffer.wrap(binaryKey);
final long start = buffer.getLong(binaryKey.length - TIMESTAMP_SIZE);
return timeWindowForSize(start, windowSize);
}
// for store serdes
/** Store-format key for a raw key plus timestamp and sequence number. */
public static Bytes toStoreKeyBinary(final Bytes key,
                                     final long timestamp,
                                     final int seqnum) {
    return toStoreKeyBinary(key.get(), timestamp, seqnum);
}
/** Store-format key for a typed key, serialized through the given serdes. */
public static <K> Bytes toStoreKeyBinary(final K key,
                                         final long timestamp,
                                         final int seqnum,
                                         final StateSerdes<K, ?> serdes) {
    return toStoreKeyBinary(serdes.rawKey(key), timestamp, seqnum);
}
/** Store-format key for an already-windowed raw key. */
public static Bytes toStoreKeyBinary(final Windowed<Bytes> timeKey,
                                     final int seqnum) {
    return toStoreKeyBinary(timeKey.key().get(), timeKey.window().start(), seqnum);
}
/** Store-format key for a windowed typed key, serialized through the serdes. */
public static <K> Bytes toStoreKeyBinary(final Windowed<K> timeKey,
                                         final int seqnum,
                                         final StateSerdes<K, ?> serdes) {
    return toStoreKeyBinary(serdes.rawKey(timeKey.key()), timeKey.window().start(), seqnum);
}
// package private for testing
/**
 * Core store-key layout: the raw key bytes, then an 8-byte timestamp,
 * then a 4-byte sequence number.
 */
static Bytes toStoreKeyBinary(final byte[] serializedKey,
                              final long timestamp,
                              final int seqnum) {
    final ByteBuffer buffer =
        ByteBuffer.allocate(serializedKey.length + TIMESTAMP_SIZE + SEQNUM_SIZE);
    buffer.put(serializedKey).putLong(timestamp).putInt(seqnum);
    return Bytes.wrap(buffer.array());
}
/** Strip the trailing timestamp and sequence number; return only the raw key bytes. */
static byte[] extractStoreKeyBytes(final byte[] binaryKey) {
    final int keyLength = binaryKey.length - TIMESTAMP_SIZE - SEQNUM_SIZE;
    final byte[] rawKey = new byte[keyLength];
    System.arraycopy(binaryKey, 0, rawKey, 0, keyLength);
    return rawKey;
}
/**
 * Deserialize the key portion of a store-format binary key.
 *
 * <p>Delegates the byte slicing to {@link #extractStoreKeyBytes} instead of
 * duplicating the same copy logic inline.
 *
 * @param binaryKey store-format key (raw key + timestamp + seqnum)
 * @param serdes    serdes used to deserialize the raw key bytes
 * @return the deserialized key
 */
static <K> K extractStoreKey(final byte[] binaryKey,
                             final StateSerdes<K, ?> serdes) {
    return serdes.keyFrom(extractStoreKeyBytes(binaryKey));
}
/** Window start timestamp, stored just before the trailing sequence number. */
static long extractStoreTimestamp(final byte[] binaryKey) {
    final int offset = binaryKey.length - TIMESTAMP_SIZE - SEQNUM_SIZE;
    return ByteBuffer.wrap(binaryKey).getLong(offset);
}
/** Sequence number, stored in the final 4 bytes of a store-format key. */
static int extractStoreSequence(final byte[] binaryKey) {
    final int offset = binaryKey.length - SEQNUM_SIZE;
    return ByteBuffer.wrap(binaryKey).getInt(offset);
}
/** Rebuild a typed windowed key from the store binary format. */
public static <K> Windowed<K> fromStoreKey(final byte[] binaryKey,
                                           final long windowSize,
                                           final Deserializer<K> deserializer,
                                           final String topic) {
    final Window window = extractStoreWindow(binaryKey, windowSize);
    final K key = deserializer.deserialize(topic, extractStoreKeyBytes(binaryKey));
    return new Windowed<>(key, window);
}
/** Re-type a windowed raw key by deserializing its key bytes; the window is kept as-is. */
public static <K> Windowed<K> fromStoreKey(final Windowed<Bytes> windowedKey,
                                           final Deserializer<K> deserializer,
                                           final String topic) {
    return new Windowed<>(
        deserializer.deserialize(topic, windowedKey.key().get()),
        windowedKey.window());
}
/** Rebuild a raw-bytes windowed key from the store binary format. */
public static Windowed<Bytes> fromStoreBytesKey(final byte[] binaryKey,
                                                final long windowSize) {
    return new Windowed<>(
        Bytes.wrap(extractStoreKeyBytes(binaryKey)),
        extractStoreWindow(binaryKey, windowSize));
}
/**
 * Read the window start from a store-format key and construct its window.
 *
 * <p>Reuses {@link #extractStoreTimestamp} rather than re-deriving the
 * same {@code length - TIMESTAMP_SIZE - SEQNUM_SIZE} offset inline.
 *
 * @param binaryKey  store-format key (raw key + timestamp + seqnum)
 * @param windowSize fixed window length used to compute the end time
 * @return the (possibly end-clamped) time window
 */
static Window extractStoreWindow(final byte[] binaryKey,
                                 final long windowSize) {
    return timeWindowForSize(extractStoreTimestamp(binaryKey), windowSize);
}
}
|
WindowKeySchema
|
java
|
processing__processing4
|
app/src/processing/app/contrib/ListPanel.java
|
{
"start": 1671,
"end": 11546
}
|
class ____ extends JPanel implements Scrollable {
ContributionTab contributionTab;
Map<Contribution, StatusDetail> detailForContrib =
new ConcurrentHashMap<>();
private final Contribution.Filter filter;
private StatusDetail selectedDetail;
protected ContributionRowFilter rowFilter;
protected JTable table;
protected TableRowSorter<ContributionTableModel> sorter;
protected ContributionTableModel model;
// state icons appearing to the left side of the list
static final int ICON_SIZE = 16;
Icon upToDateIcon;
Icon updateAvailableIcon;
Icon incompatibleIcon;
Icon downloadingIcon;
// used in the list next to the creator name
Icon foundationIcon;
Color headerFgColor;
Color headerBgColor;
Color sectionColor;
Color rowColor;
Color textColor;
Color selectionColor;
Color textColorIncompatible;
Color selectionColorIncompatible;
JScrollPane scrollPane;
static final SectionHeaderContribution[] sectionHeaders = {
new SectionHeaderContribution(ContributionType.LIBRARY),
new SectionHeaderContribution(ContributionType.MODE),
new SectionHeaderContribution(ContributionType.TOOL),
new SectionHeaderContribution(ContributionType.EXAMPLES)
};
/**
 * Build the list panel: a single-selection JTable backed by a
 * ContributionTableModel, wrapped in an always-visible scroll pane.
 *
 * @param contributionTab owning tab; consulted so the filter field keeps focus
 * @param filter          restricts which contributions this list shows
 * @param enableSections  whether type section-header rows are inserted into the model
 * @param columns         the columns this table displays, in order
 */
public ListPanel(final ContributionTab contributionTab,
                 final Contribution.Filter filter,
                 final boolean enableSections,
                 final ContributionColumn... columns) {
  this.contributionTab = contributionTab;
  this.filter = filter;
  model = new ContributionTableModel(columns); /* {
    @Override
    public void fireTableDataChanged() {
      new Exception().printStackTrace(System.out);
      super.fireTableDataChanged();
    }
  };*/
  model.enableSections(enableSections);
  table = new JTable(model) {
    @Override
    public Component prepareRenderer(TableCellRenderer renderer, int row, int column) {
      Component c = super.prepareRenderer(renderer, row, column);
      Object rowValue = getValueAt(row, column);
      // Section-header rows get their own background; data rows are colored
      // by selection state and compatibility.
      if (rowValue instanceof SectionHeaderContribution) {
        c.setBackground(sectionColor);
      } else if (isRowSelected(row)) {
        if (((Contribution) rowValue).isCompatible()) {
          c.setBackground(selectionColor);
        } else {
          c.setBackground(selectionColorIncompatible);
        }
      } else {
        c.setBackground(rowColor);
      }
      return c;
    }
    @Override
    public void changeSelection(int rowIndex, int columnIndex, boolean toggle, boolean extend) {
      // disallow selection of the header lines
      if (!(getValueAt(rowIndex, columnIndex) instanceof SectionHeaderContribution)) {
        super.changeSelection(rowIndex, columnIndex, toggle, extend);
      }
    }
  };
  scrollPane = new JScrollPane(table);
  scrollPane.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS);
  scrollPane.getVerticalScrollBar().setUI(new PdeScrollBarUI("manager.scrollbar"));
  scrollPane.setBorder(BorderFactory.createEmptyBorder());
  table.setFillsViewportHeight(true);
  table.setDefaultRenderer(Contribution.class, new ContribCellRenderer());
  table.setRowHeight(Toolkit.zoom(28));
  table.setRowMargin(Toolkit.zoom(6));
  // Fixed widths for the status (col 0) and author (col 2) columns;
  // the name column absorbs the remaining width.
  TableColumnModel tcm = table.getColumnModel();
  tcm.setColumnMargin(0);
  tcm.getColumn(0).setMaxWidth(ManagerFrame.STATUS_WIDTH);
  tcm.getColumn(2).setMinWidth(ManagerFrame.AUTHOR_WIDTH);
  tcm.getColumn(2).setMaxWidth(ManagerFrame.AUTHOR_WIDTH);
  table.setShowGrid(false);
  table.setColumnSelectionAllowed(false);
  table.setCellSelectionEnabled(false);
  table.setAutoCreateColumnsFromModel(true);
  table.setAutoCreateRowSorter(false);
  table.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
  table.getSelectionModel().addListSelectionListener(event -> {
    // This is called twice for each mouse click, once on mouse press with
    // event.getValueIsAdjusting()) set true, and again when the mouse is
    // released where adjusting will be set false. But instead of only
    // responding to one or the other, need to fire on both so that the
    // selection updates while the user drags mouse across the list (and
    // not just when released). Using the arrow keys will only fire once
    // because adjusting will be false (no ongoing drag with keys).
    int row = table.getSelectedRow();
    if (row != -1) {
      Contribution contrib = (Contribution) table.getValueAt(row, 0);
      setSelectedDetail(detailForContrib.get(contrib));
      // Preventing the focus to move out of filterField after typing every character
      if (!contributionTab.filterHasFocus()) {
        table.requestFocusInWindow();
      }
    }
  });
  // Filtered, sorted view: sort by name ascending by default, and give
  // every column its own comparator.
  sorter = new TableRowSorter<>(model);
  table.setRowSorter(sorter);
  rowFilter = new ContributionRowFilter(filter);
  sorter.setRowFilter(rowFilter);
  for (int i = 0; i < model.getColumnCount(); i++) {
    if (model.columns[i] == ContributionColumn.NAME) {
      sorter.setSortKeys(Collections.singletonList(new SortKey(i, SortOrder.ASCENDING)));
    }
    sorter.setComparator(i, model.columns[i].getComparator());
  }
  table.getTableHeader().setDefaultRenderer(new ContribHeaderRenderer());
  table.getTableHeader().setReorderingAllowed(false);
  table.getTableHeader().setResizingAllowed(true);
  table.setVisible(true);
  setOpaque(true);
  setLayout(new BorderLayout());
  add(scrollPane, BorderLayout.CENTER);
}
/**
 * Re-read all colors and icons from the current Theme and apply them to
 * the table and scroll bar. Called whenever the theme (light/dark) changes.
 */
protected void updateTheme() {
  headerFgColor = Theme.getColor("manager.list.header.fgcolor");
  headerBgColor = Theme.getColor("manager.list.header.bgcolor");
  sectionColor = Theme.getColor("manager.list.section.color");
  textColor = Theme.getColor("manager.list.text.color");
  selectionColor = Theme.getColor("manager.list.selection.color");
  textColorIncompatible = Theme.getColor("manager.list.incompatible.text.color");
  selectionColorIncompatible = Theme.getColor("manager.list.incompatible.selection.color");
  rowColor = Theme.getColor("manager.list.background.color");
  table.setBackground(rowColor);
  // Re-render the state icons at ICON_SIZE in the theme's icon color.
  foundationIcon = Toolkit.renderIcon("manager/foundation", Theme.get("manager.list.foundation.color"), ICON_SIZE);
  upToDateIcon = Toolkit.renderIcon("manager/list-up-to-date", Theme.get("manager.list.icon.color"), ICON_SIZE);
  updateAvailableIcon = Toolkit.renderIcon("manager/list-update-available", Theme.get("manager.list.icon.color"), ICON_SIZE);
  incompatibleIcon = Toolkit.renderIcon("manager/list-incompatible", Theme.get("manager.list.icon.color"), ICON_SIZE);
  downloadingIcon = Toolkit.renderIcon("manager/list-downloading", Theme.get("manager.list.icon.color"), ICON_SIZE);
  ((PdeScrollBarUI) scrollPane.getVerticalScrollBar().getUI()).updateTheme();
}
/**
 * Render the pie chart or indeterminate spinner for table rows.
 * @param amount 0..1 for a pie, -1 for indeterminate
 * @param hash unique offset to prevent indeterminate from being in the same position
 * @return properly scalable ImageIcon for rendering in the Table at 1x or 2x
 */
Icon renderProgressIcon(float amount, int hash) {
  // final int FFS_JAVA2D = ICON_SIZE - 2;
  final float FFS_JAVA2D = ICON_SIZE - 1.5f;
  // final int scale = Toolkit.highResImages() ? 2 : 1;
  // final int dim = ICON_SIZE * scale;
  // Double the backing-image resolution on hi-dpi displays.
  final int dim = ICON_SIZE * (Toolkit.highResImages() ? 2 : 1);
  // Image image = Toolkit.offscreenGraphics(this, ICON_SIZE, ICON_SIZE);
  Image image = new BufferedImage(dim, dim, BufferedImage.TYPE_INT_ARGB);
  // Graphics2D g2 = (Graphics2D) image.getGraphics();
  // Toolkit.prepareGraphics(g2);
  Graphics2D g2 = Toolkit.prepareGraphics(image);
  // g2.setColor(rowColor);
  // g2.fillRect(0, 0, ICON_SIZE, ICON_SIZE);
  // Half-pixel offset so the 1.5px stroke lands crisply on pixel centers.
  g2.translate(0.5, 0.5);
  g2.setStroke(new BasicStroke(1.5f, BasicStroke.CAP_ROUND, BasicStroke.JOIN_ROUND));
  Color iconColor = Theme.getColor("manager.list.icon.color");
  g2.setColor(iconColor);
  // g2.drawOval(0, 0, FFS_JAVA2D, FFS_JAVA2D);
  Ellipse2D circle = new Ellipse2D.Float(0, 0, FFS_JAVA2D, FFS_JAVA2D);
  if (amount != -1) {
    // draw ever-growing pie wedge
    g2.draw(circle);
    int theta = (int) (360 * amount);
    // g2.fillArc(0, 0, ICON_SIZE-1, ICON_SIZE-1, 90, -theta);
    Arc2D wedge = new Arc2D.Float(0, 0, FFS_JAVA2D, FFS_JAVA2D, 90, -theta, Arc2D.PIE);
    g2.fill(wedge);
  } else {
    // draw indeterminate state: a filled circle with a rotating line,
    // whose angle is derived from wall-clock time so it animates.
    g2.fill(circle);
    g2.translate(FFS_JAVA2D/2, FFS_JAVA2D/2);
    // offset by epoch to avoid integer out of bounds (the date is in 2001)
    final long EPOCH = 1500000000000L + Math.abs((long) hash);
    int angle = (int) ((System.currentTimeMillis() - EPOCH) / 20) % 360;
    // NOTE(review): Graphics2D.rotate() expects radians, but 'angle' is
    // computed as 0..359 (degrees). Presumably the wraparound still reads
    // as a spinner, but confirm whether Math.toRadians(angle) was intended.
    g2.rotate(angle);
    g2.setColor(rowColor);
    float lineRadius = FFS_JAVA2D * 0.3f;
    g2.draw(new Line2D.Float(-lineRadius, 0, lineRadius, 0));
  }
  g2.dispose();
  return Toolkit.wrapIcon(image);
}
// TODO remove this, yuck [fry 220313]
/** Width in pixels of this panel's vertical scroll bar. */
protected int getScrollBarWidth() {
  return scrollPane.getVerticalScrollBar().getPreferredSize().width;
}
/**
 * Sort rank for a contribution's install status: 1 = installed and current,
 * 2 = installed with updates, 3 = installed but incompatible, 4 = not installed.
 */
private static int getContributionStatusRank(Contribution c) {
  if (!c.isInstalled()) {
    // Uninstalled items are at the bottom of the sort order
    return 4;
  }
  int rank = 1;
  if (ContributionListing.getInstance().hasUpdates(c)) {
    rank = 2;
  }
  if (!c.isCompatible()) {
    // This is weird because it means some grayed-out items will
    // show up before non-gray items. We probably need another
    // state icon for 'installed but incompatible' [fry 220116]
    rank = 3;
  }
  return rank;
}
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
|
ListPanel
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.