language
stringclasses
1 value
repo
stringclasses
60 values
path
stringlengths
22
294
class_span
dict
source
stringlengths
13
1.16M
target
stringlengths
1
113
java
spring-projects__spring-security
oauth2/oauth2-client/src/main/java/org/springframework/security/oauth2/client/jackson/OAuth2AccessTokenMixin.java
{ "start": 1520, "end": 1936 }
class ____ { @JsonCreator OAuth2AccessTokenMixin( @JsonProperty("tokenType") @JsonDeserialize( converter = StdConverters.AccessTokenTypeConverter.class) OAuth2AccessToken.TokenType tokenType, @JsonProperty("tokenValue") String tokenValue, @JsonProperty("issuedAt") Instant issuedAt, @JsonProperty("expiresAt") Instant expiresAt, @JsonProperty("scopes") Set<String> scopes) { } }
OAuth2AccessTokenMixin
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/collectionincompatibletype/CompatibleWithMisuseTest.java
{ "start": 4224, "end": 4844 }
class ____<Z> { // BUG: Diagnostic contains: Valid arguments are: Z, Y, X void doSomething(@CompatibleWith("P") Object bad) {} // BUG: Diagnostic contains: Valid arguments are: Q, Z, Y, X <Q> void doSomethingElse(@CompatibleWith("P") Object bad) {} } } } """) .doTest(); } @Test public void nestedTypes() { compilationHelper .addSourceLines( "Test.java", """ import com.google.errorprone.annotations.CompatibleWith;
Test3
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/query/sqm/DelegatingSqmSelectionQueryImplementorTest.java
{ "start": 437, "end": 631 }
class ____<R> extends DelegatingSqmSelectionQueryImplementor<R> { @Override protected SqmSelectionQueryImplementor<R> getDelegate() { return null; } }
DelegatingSqmSelectionQueryImplementorTest
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/basic/DurationMappingTests.java
{ "start": 1735, "end": 3902 }
class ____ { @Test public void verifyMappings(SessionFactoryScope scope) { final MappingMetamodelImplementor mappingMetamodel = scope.getSessionFactory() .getRuntimeMetamodels() .getMappingMetamodel(); final EntityPersister entityDescriptor = mappingMetamodel.findEntityDescriptor(EntityWithDuration.class); final JdbcTypeRegistry jdbcTypeRegistry = mappingMetamodel.getTypeConfiguration().getJdbcTypeRegistry(); final Dialect dialect = scope.getSessionFactory().getJdbcServices().getDialect(); final BasicAttributeMapping duration = (BasicAttributeMapping) entityDescriptor.findAttributeMapping("duration"); final JdbcMapping jdbcMapping = duration.getJdbcMapping(); assertThat(jdbcMapping.getJavaTypeDescriptor().getJavaTypeClass(), equalTo(Duration.class)); final JdbcType intervalType = jdbcTypeRegistry.getDescriptor(SqlTypes.INTERVAL_SECOND); final JdbcType realType; if (intervalType instanceof AdjustableJdbcType) { realType = ( (AdjustableJdbcType) intervalType ).resolveIndicatedType( new JdbcTypeIndicators() { @Override public TypeConfiguration getTypeConfiguration() { return mappingMetamodel.getTypeConfiguration(); } @Override public int getColumnScale() { return duration.getScale() == null ? JdbcTypeIndicators.NO_COLUMN_SCALE : duration.getScale(); } @Override public Dialect getDialect() { return dialect; } }, jdbcMapping.getJavaTypeDescriptor() ); } else { realType = intervalType; } assertThat( jdbcMapping.getJdbcType(), is( realType ) ); scope.inTransaction( (session) -> { session.persist(new EntityWithDuration(1, Duration.ofHours(3))); } ); scope.inTransaction( (session) -> session.find(EntityWithDuration.class, 1) ); scope.inTransaction( (session) -> { session.createQuery( "from EntityWithDuration e where e.duration = :param", EntityWithDuration.class ) .setParameter( "param", Duration.ofHours( 3 ) ) .getResultList(); } ); } @Entity(name = "EntityWithDuration") @Table(name = "EntityWithDuration") public static
DurationMappingTests
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/lazy/basic/OnlyLazyBasicUpdateTest.java
{ "start": 1411, "end": 6958 }
class ____ { private Long entityId; SQLStatementInspector statementInspector(SessionFactoryScope scope) { return (SQLStatementInspector) scope.getSessionFactory().getSessionFactoryOptions().getStatementInspector(); } private void initNull(SessionFactoryScope scope) { scope.inTransaction( s -> { LazyEntity entity = new LazyEntity(); s.persist( entity ); entityId = entity.getId(); } ); } private void initNonNull(SessionFactoryScope scope) { scope.inTransaction( s -> { LazyEntity entity = new LazyEntity(); entity.setLazyProperty1( "lazy1_initial" ); entity.setLazyProperty2( "lazy2_initial" ); s.persist( entity ); entityId = entity.getId(); } ); } @BeforeEach public void clearStatementInspector(SessionFactoryScope scope) { statementInspector( scope ).clear(); } @Test public void updateSomeLazyProperty_nullToNull(SessionFactoryScope scope) { initNull( scope ); scope.inTransaction( s -> { LazyEntity entity = s.get( LazyEntity.class, entityId ); entity.setLazyProperty1( null ); } ); // When a lazy property is modified Hibernate does not perform any select // but during flush an update is performed statementInspector( scope ).assertUpdate(); } @Test public void updateSomeLazyProperty_nullToNonNull(SessionFactoryScope scope) { initNull( scope ); scope.inTransaction( s -> { LazyEntity entity = s.get( LazyEntity.class, entityId ); entity.setLazyProperty1( "lazy1_update" ); } ); scope.inTransaction( s -> { LazyEntity entity = s.get( LazyEntity.class, entityId ); assertEquals( "lazy1_update", entity.getLazyProperty1() ); assertNull( entity.getLazyProperty2() ); } ); } @Test public void updateSomeLazyProperty_nonNullToNonNull_differentValues(SessionFactoryScope scope) { initNonNull( scope ); scope.inTransaction( s -> { LazyEntity entity = s.get( LazyEntity.class, entityId ); entity.setLazyProperty1( "lazy1_update" ); } ); scope.inTransaction( s -> { LazyEntity entity = s.get( LazyEntity.class, entityId ); assertEquals( "lazy1_update", entity.getLazyProperty1() ); assertEquals( 
"lazy2_initial", entity.getLazyProperty2() ); } ); } @Test public void updateSomeLazyProperty_nonNullToNonNull_sameValues(SessionFactoryScope scope) { initNonNull( scope ); scope.inTransaction( s -> { LazyEntity entity = s.get( LazyEntity.class, entityId ); entity.setLazyProperty1( entity.getLazyProperty1() ); } ); // We should not update entities when property values did not change statementInspector( scope ).assertNoUpdate(); } @Test public void updateSomeLazyProperty_nonNullToNull(SessionFactoryScope scope) { initNonNull( scope ); scope.inTransaction( s -> { LazyEntity entity = s.get( LazyEntity.class, entityId ); entity.setLazyProperty1( null ); } ); scope.inTransaction( s -> { LazyEntity entity = s.get( LazyEntity.class, entityId ); assertNull( entity.getLazyProperty1() ); assertEquals( "lazy2_initial", entity.getLazyProperty2() ); } ); } @Test public void updateAllLazyProperties_nullToNull(SessionFactoryScope scope) { initNull( scope ); scope.inTransaction( s -> { LazyEntity entity = s.get( LazyEntity.class, entityId ); entity.setLazyProperty1( null ); entity.setLazyProperty2( null ); } ); // When a lazy property is modified Hibernate does not perform any select // but during flush an update is performed statementInspector( scope ).assertUpdate(); } @Test public void updateAllLazyProperties_nullToNonNull(SessionFactoryScope scope) { initNull( scope ); scope.inTransaction( s -> { LazyEntity entity = s.get( LazyEntity.class, entityId ); entity.setLazyProperty1( "lazy1_update" ); entity.setLazyProperty2( "lazy2_update" ); } ); scope.inTransaction( s -> { LazyEntity entity = s.get( LazyEntity.class, entityId ); assertEquals( "lazy1_update", entity.getLazyProperty1() ); assertEquals( "lazy2_update", entity.getLazyProperty2() ); } ); } @Test public void updateAllLazyProperties_nonNullToNonNull_differentValues(SessionFactoryScope scope) { initNonNull( scope ); scope.inTransaction( s -> { LazyEntity entity = s.get( LazyEntity.class, entityId ); 
entity.setLazyProperty1( "lazy1_update" ); entity.setLazyProperty2( "lazy2_update" ); } ); scope.inTransaction( s -> { LazyEntity entity = s.get( LazyEntity.class, entityId ); assertEquals( "lazy1_update", entity.getLazyProperty1() ); assertEquals( "lazy2_update", entity.getLazyProperty2() ); } ); } @Test public void updateAllLazyProperties_nonNullToNonNull_sameValues(SessionFactoryScope scope) { initNonNull( scope ); scope.inTransaction( s -> { LazyEntity entity = s.get( LazyEntity.class, entityId ); entity.setLazyProperty1( entity.getLazyProperty1() ); entity.setLazyProperty2( entity.getLazyProperty2() ); } ); // We should not update entities when property values did not change statementInspector( scope ).assertNoUpdate(); } @Test public void updateAllLazyProperties_nonNullToNull(SessionFactoryScope scope) { initNonNull( scope ); scope.inTransaction( s -> { LazyEntity entity = s.get( LazyEntity.class, entityId ); entity.setLazyProperty1( null ); entity.setLazyProperty2( null ); } ); scope.inTransaction( s -> { LazyEntity entity = s.get( LazyEntity.class, entityId ); assertNull( entity.getLazyProperty1() ); assertNull( entity.getLazyProperty2() ); } ); } @Entity @Table(name = "LAZY_ENTITY") static
OnlyLazyBasicUpdateTest
java
elastic__elasticsearch
server/src/test/java/org/elasticsearch/cluster/routing/allocation/ReplicaAllocatedAfterPrimaryTests.java
{ "start": 1529, "end": 5530 }
class ____ extends ESAllocationTestCase { public void testBackupIsAllocatedAfterPrimary() { AllocationService strategy = createAllocationService( Settings.builder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build() ); logger.info("Building initial routing table"); Metadata metadata = Metadata.builder() .put(IndexMetadata.builder("test").settings(settings(IndexVersion.current())).numberOfShards(1).numberOfReplicas(1)) .build(); RoutingTable routingTable = RoutingTable.builder(TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY) .addAsNew(metadata.getProject().index("test")) .build(); ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).routingTable(routingTable).build(); assertThat(routingTable.index("test").size(), equalTo(1)); assertThat(routingTable.index("test").shard(0).size(), equalTo(2)); assertThat(routingTable.index("test").shard(0).shard(0).state(), equalTo(UNASSIGNED)); assertThat(routingTable.index("test").shard(0).shard(1).state(), equalTo(UNASSIGNED)); assertThat(routingTable.index("test").shard(0).shard(0).currentNodeId(), nullValue()); assertThat(routingTable.index("test").shard(0).shard(1).currentNodeId(), nullValue()); logger.info("Adding one node and performing rerouting"); clusterState = ClusterState.builder(clusterState) .nodes(DiscoveryNodes.builder().add(newNode("node1")).add(newNode("node2"))) .build(); RoutingTable prevRoutingTable = routingTable; routingTable = strategy.reroute(clusterState, "reroute", ActionListener.noop()).routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); final String nodeHoldingPrimary = routingTable.index("test").shard(0).primaryShard().currentNodeId(); assertThat(prevRoutingTable != routingTable, equalTo(true)); assertThat(routingTable.index("test").size(), equalTo(1)); assertThat(routingTable.index("test").shard(0).size(), equalTo(2)); assertThat(routingTable.index("test").shard(0).primaryShard().state(), 
equalTo(INITIALIZING)); assertThat(routingTable.index("test").shard(0).primaryShard().currentNodeId(), equalTo(nodeHoldingPrimary)); assertThat(routingTable.index("test").shard(0).replicaShards().size(), equalTo(1)); assertThat(routingTable.index("test").shard(0).replicaShards().get(0).state(), equalTo(UNASSIGNED)); assertThat(routingTable.index("test").shard(0).replicaShards().get(0).currentNodeId(), nullValue()); logger.info("Start all the primary shards"); RoutingNodes routingNodes = clusterState.getRoutingNodes(); prevRoutingTable = routingTable; routingTable = startInitializingShardsAndReroute(strategy, clusterState, routingNodes.node(nodeHoldingPrimary)).routingTable(); final String nodeHoldingReplica = routingTable.index("test").shard(0).replicaShards().get(0).currentNodeId(); assertThat(nodeHoldingPrimary, not(equalTo(nodeHoldingReplica))); assertThat(prevRoutingTable != routingTable, equalTo(true)); assertThat(routingTable.index("test").size(), equalTo(1)); assertThat(routingTable.index("test").shard(0).size(), equalTo(2)); assertThat(routingTable.index("test").shard(0).primaryShard().state(), equalTo(STARTED)); assertThat(routingTable.index("test").shard(0).primaryShard().currentNodeId(), equalTo(nodeHoldingPrimary)); assertThat(routingTable.index("test").shard(0).replicaShards().size(), equalTo(1)); assertThat(routingTable.index("test").shard(0).replicaShards().get(0).state(), equalTo(INITIALIZING)); assertThat(routingTable.index("test").shard(0).replicaShards().get(0).currentNodeId(), equalTo(nodeHoldingReplica)); } }
ReplicaAllocatedAfterPrimaryTests
java
spring-projects__spring-boot
module/spring-boot-actuator/src/test/java/org/springframework/boot/actuate/endpoint/web/annotation/WebEndpointDiscovererTests.java
{ "start": 3551, "end": 15308 }
class ____ { @Test void getEndpointsWhenNoEndpointBeansShouldReturnEmptyCollection() { load(EmptyConfiguration.class, (discoverer) -> assertThat(discoverer.getEndpoints()).isEmpty()); } @Test void getEndpointsWhenWebExtensionIsMissingEndpointShouldThrowException() { load(TestWebEndpointExtensionConfiguration.class, (discoverer) -> assertThatIllegalStateException().isThrownBy(discoverer::getEndpoints) .withMessageContaining("Invalid extension 'endpointExtension': no endpoint found with id 'test'")); } @Test void getEndpointsWhenHasFilteredEndpointShouldOnlyDiscoverWebEndpoints() { load(MultipleEndpointsConfiguration.class, (discoverer) -> { Map<EndpointId, ExposableWebEndpoint> endpoints = mapEndpoints(discoverer.getEndpoints()); assertThat(endpoints).containsOnlyKeys(EndpointId.of("test")); }); } @Test void getEndpointsWhenHasWebExtensionShouldOverrideStandardEndpoint() { load(OverriddenOperationWebEndpointExtensionConfiguration.class, (discoverer) -> { Map<EndpointId, ExposableWebEndpoint> endpoints = mapEndpoints(discoverer.getEndpoints()); assertThat(endpoints).containsOnlyKeys(EndpointId.of("test")); ExposableWebEndpoint endpoint = endpoints.get(EndpointId.of("test")); assertThat(endpoint).isNotNull(); assertThat(requestPredicates(endpoint)).has(requestPredicates( path("test").httpMethod(WebEndpointHttpMethod.GET).consumes().produces("application/json"))); }); } @Test void getEndpointsWhenExtensionAddsOperationShouldHaveBothOperations() { load(AdditionalOperationWebEndpointConfiguration.class, (discoverer) -> { Map<EndpointId, ExposableWebEndpoint> endpoints = mapEndpoints(discoverer.getEndpoints()); assertThat(endpoints).containsOnlyKeys(EndpointId.of("test")); ExposableWebEndpoint endpoint = endpoints.get(EndpointId.of("test")); assertThat(endpoint).isNotNull(); assertThat(requestPredicates(endpoint)).has(requestPredicates( path("test").httpMethod(WebEndpointHttpMethod.GET).consumes().produces("application/json"), 
path("test/{id}").httpMethod(WebEndpointHttpMethod.GET).consumes().produces("application/json"))); }); } @Test void getEndpointsWhenPredicateForWriteOperationThatReturnsVoidShouldHaveNoProducedMediaTypes() { load(VoidWriteOperationEndpointConfiguration.class, (discoverer) -> { Map<EndpointId, ExposableWebEndpoint> endpoints = mapEndpoints(discoverer.getEndpoints()); assertThat(endpoints).containsOnlyKeys(EndpointId.of("voidwrite")); ExposableWebEndpoint endpoint = endpoints.get(EndpointId.of("voidwrite")); assertThat(endpoint).isNotNull(); assertThat(requestPredicates(endpoint)).has(requestPredicates( path("voidwrite").httpMethod(WebEndpointHttpMethod.POST).produces().consumes("application/json"))); }); } @Test void getEndpointsWhenTwoExtensionsHaveTheSameEndpointTypeShouldThrowException() { load(ClashingWebEndpointConfiguration.class, (discoverer) -> assertThatIllegalStateException().isThrownBy(discoverer::getEndpoints) .withMessageContaining("Found multiple extensions for the endpoint bean " + "testEndpoint (testExtensionOne, testExtensionTwo)")); } @Test void getEndpointsWhenTwoStandardEndpointsHaveTheSameIdShouldThrowException() { load(ClashingStandardEndpointConfiguration.class, (discoverer) -> assertThatIllegalStateException().isThrownBy(discoverer::getEndpoints) .withMessageContaining("Found two endpoints with the id 'test': ")); } @Test void getEndpointsWhenWhenEndpointHasTwoOperationsWithTheSameNameShouldThrowException() { load(ClashingOperationsEndpointConfiguration.class, (discoverer) -> assertThatIllegalStateException().isThrownBy(discoverer::getEndpoints) .withMessageContaining("Unable to map duplicate endpoint operations: " + "[web request predicate GET to path 'test' " + "produces: application/json] to clashingOperationsEndpoint")); } @Test void getEndpointsWhenExtensionIsNotCompatibleWithTheEndpointTypeShouldThrowException() { load(InvalidWebExtensionConfiguration.class, (discoverer) -> 
assertThatIllegalStateException().isThrownBy(discoverer::getEndpoints) .withMessageContaining("Endpoint bean 'nonWebEndpoint' cannot support the " + "extension bean 'nonWebWebEndpointExtension'")); } @Test void getEndpointsWhenWhenExtensionHasTwoOperationsWithTheSameNameShouldThrowException() { load(ClashingSelectorsWebEndpointExtensionConfiguration.class, (discoverer) -> assertThatIllegalStateException().isThrownBy(discoverer::getEndpoints) .withMessageContaining("Unable to map duplicate endpoint operations") .withMessageContaining("to testEndpoint (clashingSelectorsExtension)")); } @Test void getEndpointsWhenHasCacheWithTtlShouldCacheReadOperationWithTtlValue() { load((id) -> 500L, EndpointId::toString, TestEndpointConfiguration.class, (discoverer) -> { Map<EndpointId, ExposableWebEndpoint> endpoints = mapEndpoints(discoverer.getEndpoints()); assertThat(endpoints).containsOnlyKeys(EndpointId.of("test")); ExposableWebEndpoint endpoint = endpoints.get(EndpointId.of("test")); assertThat(endpoint).isNotNull(); assertThat(endpoint.getOperations()).hasSize(1); WebOperation operation = endpoint.getOperations().iterator().next(); Object invoker = ReflectionTestUtils.getField(operation, "invoker"); assertThat(invoker).isInstanceOf(CachingOperationInvoker.class); assertThat(((CachingOperationInvoker) invoker).getTimeToLive()).isEqualTo(500); }); } @Test void getEndpointsWhenOperationReturnsResourceShouldProduceApplicationOctetStream() { load(ResourceEndpointConfiguration.class, (discoverer) -> { Map<EndpointId, ExposableWebEndpoint> endpoints = mapEndpoints(discoverer.getEndpoints()); assertThat(endpoints).containsOnlyKeys(EndpointId.of("resource")); ExposableWebEndpoint endpoint = endpoints.get(EndpointId.of("resource")); assertThat(endpoint).isNotNull(); assertThat(requestPredicates(endpoint)) .has(requestPredicates(path("resource").httpMethod(WebEndpointHttpMethod.GET) .consumes() .produces("application/octet-stream"))); }); } @Test void 
getEndpointsWhenHasCustomMediaTypeShouldProduceCustomMediaType() { load(CustomMediaTypesEndpointConfiguration.class, (discoverer) -> { Map<EndpointId, ExposableWebEndpoint> endpoints = mapEndpoints(discoverer.getEndpoints()); assertThat(endpoints).containsOnlyKeys(EndpointId.of("custommediatypes")); ExposableWebEndpoint endpoint = endpoints.get(EndpointId.of("custommediatypes")); assertThat(endpoint).isNotNull(); assertThat(requestPredicates(endpoint)).has(requestPredicates( path("custommediatypes").httpMethod(WebEndpointHttpMethod.GET).consumes().produces("text/plain"), path("custommediatypes").httpMethod(WebEndpointHttpMethod.POST).consumes().produces("a/b", "c/d"), path("custommediatypes").httpMethod(WebEndpointHttpMethod.DELETE) .consumes() .produces("text/plain"))); }); } @Test void getEndpointsWhenHasCustomPathShouldReturnCustomPath() { load((id) -> null, (id) -> "custom/" + id, AdditionalOperationWebEndpointConfiguration.class, (discoverer) -> { Map<EndpointId, ExposableWebEndpoint> endpoints = mapEndpoints(discoverer.getEndpoints()); assertThat(endpoints).containsOnlyKeys(EndpointId.of("test")); ExposableWebEndpoint endpoint = endpoints.get(EndpointId.of("test")); Condition<List<? 
extends WebOperationRequestPredicate>> expected = requestPredicates( path("custom/test").httpMethod(WebEndpointHttpMethod.GET).consumes().produces("application/json"), path("custom/test/{id}").httpMethod(WebEndpointHttpMethod.GET) .consumes() .produces("application/json")); assertThat(endpoint).isNotNull(); assertThat(requestPredicates(endpoint)).has(expected); }); } @Test void getEndpointsWhenHasAdditionalPaths() { AdditionalPathsMapper additionalPathsMapper = (id, webServerNamespace) -> { if (!WebServerNamespace.SERVER.equals(webServerNamespace)) { return Collections.emptyList(); } return List.of("/test"); }; load((id) -> null, EndpointId::toString, additionalPathsMapper, AdditionalOperationWebEndpointConfiguration.class, (discoverer) -> { Map<EndpointId, ExposableWebEndpoint> endpoints = mapEndpoints(discoverer.getEndpoints()); ExposableWebEndpoint endpoint = endpoints.get(EndpointId.of("test")); assertThat(endpoint).isNotNull(); assertThat(endpoint.getAdditionalPaths(WebServerNamespace.SERVER)).containsExactly("/test"); assertThat(endpoint.getAdditionalPaths(WebServerNamespace.MANAGEMENT)).isEmpty(); }); } @Test void shouldRegisterHints() { RuntimeHints runtimeHints = new RuntimeHints(); new WebEndpointDiscovererRuntimeHints().registerHints(runtimeHints, getClass().getClassLoader()); assertThat(RuntimeHintsPredicates.reflection() .onType(WebEndpointFilter.class) .withMemberCategories(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS)).accepts(runtimeHints); } private void load(Class<?> configuration, Consumer<WebEndpointDiscoverer> consumer) { load((id) -> null, EndpointId::toString, configuration, consumer); } private void load(Function<EndpointId, @Nullable Long> timeToLive, PathMapper endpointPathMapper, Class<?> configuration, Consumer<WebEndpointDiscoverer> consumer) { load(timeToLive, endpointPathMapper, null, configuration, consumer); } private void load(Function<EndpointId, @Nullable Long> timeToLive, PathMapper endpointPathMapper, @Nullable 
AdditionalPathsMapper additionalPathsMapper, Class<?> configuration, Consumer<WebEndpointDiscoverer> consumer) { try (AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(configuration)) { ConversionServiceParameterValueMapper parameterMapper = new ConversionServiceParameterValueMapper( DefaultConversionService.getSharedInstance()); EndpointMediaTypes mediaTypes = new EndpointMediaTypes(Collections.singletonList("application/json"), Collections.singletonList("application/json")); WebEndpointDiscoverer discoverer = new WebEndpointDiscoverer(context, parameterMapper, mediaTypes, Collections.singletonList(endpointPathMapper), (additionalPathsMapper != null) ? Collections.singletonList(additionalPathsMapper) : null, Collections.singleton(new CachingOperationInvokerAdvisor(timeToLive)), Collections.emptyList(), Collections.emptyList()); consumer.accept(discoverer); } } private Map<EndpointId, ExposableWebEndpoint> mapEndpoints(Collection<ExposableWebEndpoint> endpoints) { Map<EndpointId, ExposableWebEndpoint> endpointById = new HashMap<>(); endpoints.forEach((endpoint) -> endpointById.put(endpoint.getEndpointId(), endpoint)); return endpointById; } private List<WebOperationRequestPredicate> requestPredicates(ExposableWebEndpoint endpoint) { return endpoint.getOperations().stream().map(WebOperation::getRequestPredicate).toList(); } private Condition<List<? extends WebOperationRequestPredicate>> requestPredicates( RequestPredicateMatcher... 
matchers) { return new Condition<>((predicates) -> { if (predicates.size() != matchers.length) { return false; } Map<WebOperationRequestPredicate, Long> matchCounts = new HashMap<>(); for (WebOperationRequestPredicate predicate : predicates) { matchCounts.put(predicate, Stream.of(matchers).filter((matcher) -> matcher.matches(predicate)).count()); } return matchCounts.values().stream().noneMatch((count) -> count != 1); }, Arrays.toString(matchers)); } private RequestPredicateMatcher path(String path) { return new RequestPredicateMatcher(path); } @Configuration(proxyBeanMethods = false) static
WebEndpointDiscovererTests
java
apache__camel
components/camel-cm-sms/src/main/java/org/apache/camel/component/cm/CMSender.java
{ "start": 907, "end": 1037 }
interface ____ { /** * Sends a validated sms message to CM Endpoints. */ void send(CMMessage cmMessage); }
CMSender
java
spring-projects__spring-framework
spring-orm/src/main/java/org/springframework/orm/jpa/support/OpenEntityManagerInViewFilter.java
{ "start": 3110, "end": 9927 }
class ____ extends OncePerRequestFilter { /** * Default EntityManagerFactory bean name: "entityManagerFactory". * Only applies when no "persistenceUnitName" param has been specified. * @see #setEntityManagerFactoryBeanName * @see #setPersistenceUnitName */ public static final String DEFAULT_ENTITY_MANAGER_FACTORY_BEAN_NAME = "entityManagerFactory"; private @Nullable String entityManagerFactoryBeanName; private @Nullable String persistenceUnitName; private volatile @Nullable EntityManagerFactory entityManagerFactory; /** * Set the bean name of the EntityManagerFactory to fetch from Spring's * root application context. * <p>Default is "entityManagerFactory". Note that this default only applies * when no "persistenceUnitName" param has been specified. * @see #setPersistenceUnitName * @see #DEFAULT_ENTITY_MANAGER_FACTORY_BEAN_NAME */ public void setEntityManagerFactoryBeanName(@Nullable String entityManagerFactoryBeanName) { this.entityManagerFactoryBeanName = entityManagerFactoryBeanName; } /** * Return the bean name of the EntityManagerFactory to fetch from Spring's * root application context. */ protected @Nullable String getEntityManagerFactoryBeanName() { return this.entityManagerFactoryBeanName; } /** * Set the name of the persistence unit to access the EntityManagerFactory for. * <p>This is an alternative to specifying the EntityManagerFactory by bean name, * resolving it by its persistence unit name instead. If no bean name and no persistence * unit name have been specified, we'll check whether a bean exists for the default * bean name "entityManagerFactory"; if not, a default EntityManagerFactory will * be retrieved through finding a single unique bean of type EntityManagerFactory. 
* @see #setEntityManagerFactoryBeanName * @see #DEFAULT_ENTITY_MANAGER_FACTORY_BEAN_NAME */ public void setPersistenceUnitName(@Nullable String persistenceUnitName) { this.persistenceUnitName = persistenceUnitName; } /** * Return the name of the persistence unit to access the EntityManagerFactory for, if any. */ protected @Nullable String getPersistenceUnitName() { return this.persistenceUnitName; } /** * Returns "false" so that the filter may re-bind the opened {@code EntityManager} * to each asynchronously dispatched thread and postpone closing it until the very * last asynchronous dispatch. */ @Override protected boolean shouldNotFilterAsyncDispatch() { return false; } /** * Returns "false" so that the filter may provide an {@code EntityManager} * to each error dispatches. */ @Override protected boolean shouldNotFilterErrorDispatch() { return false; } @Override protected void doFilterInternal( HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) throws ServletException, IOException { EntityManagerFactory emf = lookupEntityManagerFactory(request); boolean participate = false; WebAsyncManager asyncManager = WebAsyncUtils.getAsyncManager(request); String key = getAlreadyFilteredAttributeName(); if (TransactionSynchronizationManager.hasResource(emf)) { // Do not modify the EntityManager: just set the participate flag. 
participate = true; } else { boolean isFirstRequest = !isAsyncDispatch(request); if (isFirstRequest || !applyEntityManagerBindingInterceptor(asyncManager, key)) { logger.debug("Opening JPA EntityManager in OpenEntityManagerInViewFilter"); try { EntityManager em = createEntityManager(emf); EntityManagerHolder emHolder = new EntityManagerHolder(em); TransactionSynchronizationManager.bindResource(emf, emHolder); AsyncRequestInterceptor interceptor = new AsyncRequestInterceptor(emf, emHolder); asyncManager.registerCallableInterceptor(key, interceptor); asyncManager.registerDeferredResultInterceptor(key, interceptor); } catch (PersistenceException ex) { throw new DataAccessResourceFailureException("Could not create JPA EntityManager", ex); } } } try { filterChain.doFilter(request, response); } finally { if (!participate) { EntityManagerHolder emHolder = (EntityManagerHolder) TransactionSynchronizationManager.unbindResource(emf); if (!isAsyncStarted(request)) { logger.debug("Closing JPA EntityManager in OpenEntityManagerInViewFilter"); EntityManagerFactoryUtils.closeEntityManager(emHolder.getEntityManager()); } } } } /** * Look up the EntityManagerFactory that this filter should use, * taking the current HTTP request as argument. * <p>The default implementation delegates to the {@code lookupEntityManagerFactory} * without arguments, caching the EntityManagerFactory reference once obtained. * @return the EntityManagerFactory to use * @see #lookupEntityManagerFactory() */ protected EntityManagerFactory lookupEntityManagerFactory(HttpServletRequest request) { EntityManagerFactory emf = this.entityManagerFactory; if (emf == null) { emf = lookupEntityManagerFactory(); this.entityManagerFactory = emf; } return emf; } /** * Look up the EntityManagerFactory that this filter should use. * <p>The default implementation looks for a bean with the specified name * in Spring's root application context. 
* @return the EntityManagerFactory to use * @see #getEntityManagerFactoryBeanName */ protected EntityManagerFactory lookupEntityManagerFactory() { WebApplicationContext wac = WebApplicationContextUtils.getRequiredWebApplicationContext(getServletContext()); String emfBeanName = getEntityManagerFactoryBeanName(); String puName = getPersistenceUnitName(); if (StringUtils.hasLength(emfBeanName)) { return wac.getBean(emfBeanName, EntityManagerFactory.class); } else if (!StringUtils.hasLength(puName) && wac.containsBean(DEFAULT_ENTITY_MANAGER_FACTORY_BEAN_NAME)) { return wac.getBean(DEFAULT_ENTITY_MANAGER_FACTORY_BEAN_NAME, EntityManagerFactory.class); } else { // Includes fallback search for single EntityManagerFactory bean by type. return EntityManagerFactoryUtils.findEntityManagerFactory(wac, puName); } } /** * Create a JPA EntityManager to be bound to a request. * <p>Can be overridden in subclasses. * @param emf the EntityManagerFactory to use * @see jakarta.persistence.EntityManagerFactory#createEntityManager() */ protected EntityManager createEntityManager(EntityManagerFactory emf) { return emf.createEntityManager(); } private boolean applyEntityManagerBindingInterceptor(WebAsyncManager asyncManager, String key) { CallableProcessingInterceptor cpi = asyncManager.getCallableInterceptor(key); if (cpi == null) { return false; } ((AsyncRequestInterceptor) cpi).bindEntityManager(); return true; } }
OpenEntityManagerInViewFilter
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/embeddable/ElementCollectionLazyToOneTest.java
{ "start": 2555, "end": 3194 }
class ____ { @Id private Long id; private String name; @ElementCollection private List<TheEmbeddable> embeddables; public TheEntity() { } public TheEntity(Long id, String name) { this.id = id; this.name = name; } public Long getId() { return id; } public void setId(Long id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } public List<TheEmbeddable> getEmbeddables() { return embeddables; } public void setEmbeddables(List<TheEmbeddable> embeddables) { this.embeddables = embeddables; } } }
TheEntity
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-globalpolicygenerator/src/main/java/org/apache/hadoop/yarn/server/globalpolicygenerator/policygenerator/GlobalPolicy.java
{ "start": 1206, "end": 1340 }
interface ____ the plug-able policy that the PolicyGenerator uses * to update policies into the state store. */ public abstract
defines
java
lettuce-io__lettuce-core
src/test/java/io/lettuce/core/dynamic/RedisCommandsReactiveIntegrationTests.java
{ "start": 693, "end": 2847 }
class ____ extends TestSupport { private final RedisCommands<String, String> redis; @Inject RedisCommandsReactiveIntegrationTests(StatefulRedisConnection<String, String> connection) { this.redis = connection.sync(); } @BeforeEach void setUp() { this.redis.flushall(); } @Test void reactive() { RedisCommandFactory factory = new RedisCommandFactory(redis.getStatefulConnection()); MultipleExecutionModels api = factory.getCommands(MultipleExecutionModels.class); StepVerifier.create(api.setReactive(key, value)).expectNext("OK").verifyComplete(); } @Test void shouldHandlePresentValue() { RedisCommandFactory factory = new RedisCommandFactory(redis.getStatefulConnection()); MultipleExecutionModels api = factory.getCommands(MultipleExecutionModels.class); StepVerifier.create(api.setReactive(key, value)).expectNext("OK").verifyComplete(); StepVerifier.create(api.get(key)).expectNext(value).verifyComplete(); } @Test void shouldHandleAbsentValue() { RedisCommandFactory factory = new RedisCommandFactory(redis.getStatefulConnection()); MultipleExecutionModels api = factory.getCommands(MultipleExecutionModels.class); StepVerifier.create(api.get("unknown")).verifyComplete(); } @Test void shouldHandlePresentValueRxJava() throws InterruptedException { RedisCommandFactory factory = new RedisCommandFactory(redis.getStatefulConnection()); MultipleExecutionModels api = factory.getCommands(MultipleExecutionModels.class); StepVerifier.create(api.setReactive(key, value)).expectNext("OK").verifyComplete(); api.getRxJava(key).test().await().onSuccess(value); } @Test void shouldHandleAbsentValueRxJava() throws InterruptedException { RedisCommandFactory factory = new RedisCommandFactory(redis.getStatefulConnection()); MultipleExecutionModels api = factory.getCommands(MultipleExecutionModels.class); api.getRxJava(key).test().await().onSuccess(null); }
RedisCommandsReactiveIntegrationTests
java
playframework__playframework
web/play-java-forms/src/main/java/play/data/validation/Constraints.java
{ "start": 19441, "end": 19864 }
interface ____ { String message() default ValidateWithValidator.defaultMessage; Class<?>[] groups() default {}; Class<? extends Payload>[] payload() default {}; Class<? extends Validator> value(); /** Defines several {@code @ValidateWith} annotations on the same element. */ @Target({METHOD, FIELD, ANNOTATION_TYPE, CONSTRUCTOR, PARAMETER, TYPE_USE}) @Retention(RUNTIME) public @
ValidateWith
java
spring-projects__spring-data-jpa
spring-data-envers/src/test/java/org/springframework/data/envers/repository/support/RepositoryIntegrationTests.java
{ "start": 2133, "end": 8277 }
class ____ { @Autowired LicenseRepository licenseRepository; @Autowired CountryRepository countryRepository; @BeforeEach void setUp() { licenseRepository.deleteAll(); countryRepository.deleteAll(); } @Test void testLifeCycle() { License license = new License(); license.name = "Schnitzel"; licenseRepository.save(license); Country de = new Country(); de.code = "de"; de.name = "Deutschland"; countryRepository.save(de); Country se = new Country(); se.code = "se"; se.name = "Schweden"; countryRepository.save(se); license.laender = new HashSet<>(); license.laender.addAll(Arrays.asList(de, se)); licenseRepository.save(license); de.name = "Daenemark"; countryRepository.save(de); Optional<Revision<Integer, License>> revision = licenseRepository.findLastChangeRevision(license.id); assertThat(revision).hasValueSatisfying(it -> { Page<Revision<Integer, License>> page = licenseRepository.findRevisions(license.id, PageRequest.of(0, 10)); Revisions<Integer, License> revisions = Revisions.of(page.getContent()); Revision<Integer, License> latestRevision = revisions.getLatestRevision(); assertThat(latestRevision.getRequiredRevisionNumber()).isEqualTo(it.getRequiredRevisionNumber()); assertThat(latestRevision.getEntity()).isEqualTo(it.getEntity()); }); } @Test // GH-3999 void shouldReturnUnpagedResults() { License license = new License(); license.name = "Schnitzel"; licenseRepository.save(license); Country de = new Country(); de.code = "de"; de.name = "Deutschland"; countryRepository.save(de); Country se = new Country(); se.code = "se"; se.name = "Schweden"; countryRepository.save(se); license.laender = new HashSet<>(); license.laender.addAll(Arrays.asList(de, se)); licenseRepository.save(license); de.name = "Daenemark"; countryRepository.save(de); Page<Revision<Integer, License>> revisions = licenseRepository.findRevisions(license.id, Pageable.unpaged()); assertThat(revisions).hasSize(2); } @Test // #1 void returnsEmptyLastRevisionForUnrevisionedEntity() { 
assertThat(countryRepository.findLastChangeRevision(100L)).isEmpty(); } @Test // #47 void returnsEmptyRevisionsForUnrevisionedEntity() { assertThat(countryRepository.findRevisions(100L)).isEmpty(); } @Test // #47 void returnsEmptyRevisionForUnrevisionedEntity() { assertThat(countryRepository.findRevision(100L, 23)).isEmpty(); } @Test // #31 void returnsParticularRevisionForAnEntity() { Country de = new Country(); de.code = "de"; de.name = "Deutschland"; countryRepository.save(de); de.name = "Germany"; countryRepository.save(de); Revisions<Integer, Country> revisions = countryRepository.findRevisions(de.id); assertThat(revisions).hasSize(2); Iterator<Revision<Integer, Country>> iterator = revisions.iterator(); Revision<Integer, Country> first = iterator.next(); Revision<Integer, Country> second = iterator.next(); assertThat(countryRepository.findRevision(de.id, first.getRequiredRevisionNumber())) .hasValueSatisfying(it -> assertThat(it.getEntity().name).isEqualTo("Deutschland")); assertThat(countryRepository.findRevision(de.id, second.getRequiredRevisionNumber())) .hasValueSatisfying(it -> assertThat(it.getEntity().name).isEqualTo("Germany")); } @Test // #55 void considersRevisionNumberSortOrder() { Country de = new Country(); de.code = "de"; de.name = "Deutschland"; countryRepository.save(de); de.name = "Germany"; countryRepository.save(de); Page<Revision<Integer, Country>> page = countryRepository.findRevisions(de.id, PageRequest.of(0, 10, RevisionSort.desc())); assertThat(page).hasSize(2); assertThat(page.getContent().get(0).getRequiredRevisionNumber()) .isGreaterThan(page.getContent().get(1).getRequiredRevisionNumber()); } @Test // #21 void findsDeletedRevisions() { Country de = new Country(); de.code = "de"; de.name = "Deutschland"; countryRepository.save(de); countryRepository.delete(de); Revisions<Integer, Country> revisions = countryRepository.findRevisions(de.id); assertThat(revisions).hasSize(2); assertThat(revisions.getLatestRevision().getEntity()) // 
.extracting(c -> c.name, c -> c.code) // .containsExactly(null, null); } @Test // #47 void includesCorrectRevisionType() { Country de = new Country(); de.code = "de"; de.name = "Deutschland"; countryRepository.save(de); de.name = "Bundes Republik Deutschland"; countryRepository.save(de); countryRepository.delete(de); Revisions<Integer, Country> revisions = countryRepository.findRevisions(de.id); assertThat(revisions) // .extracting(r -> r.getMetadata().getRevisionType()) // .containsExactly( // INSERT, // UPDATE, // DELETE // ); } @Test // #146 void shortCircuitingWhenOffsetIsToLarge() { Country de = new Country(); de.code = "de"; de.name = "Deutschland"; countryRepository.save(de); countryRepository.delete(de); check(de.id, 0, 1, 2); check(de.id, 1, 1, 2); check(de.id, 2, 0, 2); } @Test // #47 void paginationWithEmptyResult() { check(-23L, 0, 0, 0); } @Test // Envers #379 void testSort_pageableByProperty() { Country de = new Country(); de.code = "de"; de.name = "Deutschland"; de.timestamp = Instant.parse("2000-01-01T00:00:00Z"); countryRepository.save(de); de.timestamp = Instant.parse("2000-01-04T00:01:00Z"); countryRepository.save(de); de.timestamp = Instant.parse("2000-01-04T00:00:00Z"); countryRepository.save(de); assertThat(countryRepository.findRevisions(de.id, PageRequest.of(0, 3, Sort.by("timestamp"))).map(Revision::getEntity).map(country -> country.timestamp).getContent()) .isSortedAccordingTo(Instant::compareTo); } void check(Long id, int page, int expectedSize, int expectedTotalSize) { Page<Revision<Integer, Country>> revisions = countryRepository.findRevisions(id, PageRequest.of(page, 1)); assertThat(revisions).hasSize(expectedSize); assertThat(revisions.getTotalElements()).isEqualTo(expectedTotalSize); } }
RepositoryIntegrationTests
java
spring-projects__spring-boot
module/spring-boot-security-oauth2-resource-server/src/main/java/org/springframework/boot/security/oauth2/server/resource/autoconfigure/servlet/OAuth2ResourceServerJwtConfiguration.java
{ "start": 8443, "end": 8971 }
class ____ { @Bean @ConditionalOnBean(JwtDecoder.class) SecurityFilterChain jwtSecurityFilterChain(HttpSecurity http) { http.authorizeHttpRequests((requests) -> requests.anyRequest().authenticated()); http.oauth2ResourceServer((resourceServer) -> resourceServer.jwt(withDefaults())); return http.build(); } } @Configuration(proxyBeanMethods = false) @ConditionalOnMissingBean(JwtAuthenticationConverter.class) @Conditional(JwtConverterPropertiesCondition.class) static
OAuth2SecurityFilterChainConfiguration
java
spring-projects__spring-framework
spring-webflux/src/main/java/org/springframework/web/reactive/result/method/annotation/AbstractNamedValueArgumentResolver.java
{ "start": 2822, "end": 2936 }
class ____. * * @author Rossen Stoyanchev * @author Sebastien Deleuze * @since 5.0 */ public abstract
constructor
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/helper/AppInfoJsonVerifications.java
{ "start": 1647, "end": 6220 }
class ____ { private AppInfoJsonVerifications() { //utility class } /** * Tests whether {@link AppInfo} representation object contains the required * values as per defined in the specified app parameter. * @param app an RMApp instance that contains the required values * to test against. */ public static void verify(JSONObject info, RMApp app) throws JSONException { checkStringMatch("id", app.getApplicationId().toString(), info.getString("id")); checkStringMatch("user", app.getUser(), info.getString("user")); checkStringMatch("name", app.getName(), info.getString("name")); checkStringMatch("applicationType", app.getApplicationType(), info.getString("applicationType")); checkStringMatch("queue", app.getQueue(), info.getString("queue")); assertEquals(0, info.getInt("priority"), "priority doesn't match"); checkStringMatch("state", app.getState().toString(), info.getString("state")); checkStringMatch("finalStatus", app.getFinalApplicationStatus().toString(), info.getString("finalStatus")); assertEquals(0, (float) info.getDouble("progress"), 0.0, "progress doesn't match"); if ("UNASSIGNED".equals(info.getString("trackingUI"))) { checkStringMatch("trackingUI", "UNASSIGNED", info.getString("trackingUI")); } checkStringEqual("diagnostics", app.getDiagnostics().toString(), info.getString("diagnostics")); assertEquals(ResourceManager.getClusterTimeStamp(), info.getLong("clusterId"), "clusterId doesn't match"); assertEquals(app.getStartTime(), info.getLong("startedTime"), "startedTime doesn't match"); assertEquals(app.getFinishTime(), info.getLong("finishedTime"), "finishedTime doesn't match"); assertTrue(info.getLong("elapsedTime") > 0, "elapsed time not greater than 0"); checkStringMatch("amHostHttpAddress", app.getCurrentAppAttempt().getMasterContainer().getNodeHttpAddress(), info.getString("amHostHttpAddress")); assertTrue(info.getString("amContainerLogs").startsWith("http://"), "amContainerLogs doesn't match"); assertTrue(info.getString("amContainerLogs").endsWith("/" + 
app.getUser()), "amContainerLogs doesn't contain user info"); assertEquals(1024, info.getInt("allocatedMB"), "allocatedMB doesn't match"); assertEquals(1, info.getInt("allocatedVCores"), "allocatedVCores doesn't match"); assertEquals(50.0f, (float) info.getDouble("queueUsagePercentage"), 0.01f, "queueUsagePerc doesn't match"); assertEquals(50.0f, (float) info.getDouble("clusterUsagePercentage"), 0.01f, "clusterUsagePerc doesn't match"); assertEquals(1, info.getInt("runningContainers"), "numContainers doesn't match"); assertNotNull(info.get("preemptedResourceSecondsMap"), "preemptedResourceSecondsMap should not be null"); assertEquals(app.getRMAppMetrics().getResourcePreempted().getMemorySize(), info.getInt("preemptedResourceMB"), "preemptedResourceMB doesn't match"); assertEquals(app.getRMAppMetrics().getResourcePreempted().getVirtualCores(), info.getInt("preemptedResourceVCores"), "preemptedResourceVCores doesn't match"); assertEquals(app.getRMAppMetrics().getNumNonAMContainersPreempted(), info.getInt("numNonAMContainerPreempted"), "numNonAMContainerPreempted doesn't match"); assertEquals(app.getRMAppMetrics().getNumAMContainersPreempted(), info.getInt("numAMContainerPreempted"), "numAMContainerPreempted doesn't match"); assertEquals(app.getLogAggregationStatusForAppReport().toString(), info.getString("logAggregationStatus"), "Log aggregation Status doesn't match"); assertEquals(app.getApplicationSubmissionContext().getUnmanagedAM(), info.getBoolean("unmanagedApplication"), "unmanagedApplication doesn't match"); if (app.getApplicationSubmissionContext() .getNodeLabelExpression() != null) { assertEquals(app.getApplicationSubmissionContext().getNodeLabelExpression(), info.getString("appNodeLabelExpression"), "appNodeLabelExpression doesn't match"); } assertEquals(app.getAMResourceRequests().get(0).getNodeLabelExpression(), info.getString("amNodeLabelExpression"), "amNodeLabelExpression doesn't match"); 
assertEquals(AppInfo.getAmRPCAddressFromRMAppAttempt(app.getCurrentAppAttempt()), info.getString("amRPCAddress"), "amRPCAddress"); } }
AppInfoJsonVerifications
java
apache__camel
components/camel-ftp/src/test/java/org/apache/camel/component/file/remote/sftp/integration/SftpConsumerAutoCreateIT.java
{ "start": 1491, "end": 2937 }
class ____ extends SftpServerTestSupport { protected String getFtpUrl() { return "sftp://admin@localhost:{{ftp.server.port}}/{{ftp.root.dir}}/foo/bar/baz/xxx?password=admin&knownHostsFile=" + service.getKnownHostsFile(); } @AfterEach public void cleanupDir() { FileUtil.removeDir(new File(service.getFtpRootDir().toFile(), "/foo/bar/baz/xxx")); } @Test public void testAutoCreate() { SftpEndpoint endpoint = (SftpEndpoint) this.getMandatoryEndpoint(getFtpUrl() + "&autoCreate=true"); endpoint.start(); endpoint.getExchanges(); assertTrue(ftpFile("foo/bar/baz/xxx").toFile().exists()); // producer should create necessary subdirs template.sendBodyAndHeader(getFtpUrl(), "Hello World", Exchange.FILE_NAME, "sub1/sub2/hello.txt"); assertTrue(ftpFile("foo/bar/baz/xxx/sub1/sub2").toFile().exists()); // to see if another connect causes problems with autoCreate=true endpoint.stop(); endpoint.start(); endpoint.getExchanges(); } @Test public void testNoAutoCreate() { SftpEndpoint endpoint = (SftpEndpoint) this.getMandatoryEndpoint(getFtpUrl() + "&autoCreate=false"); endpoint.start(); assertThrows(GenericFileOperationFailedException.class, () -> endpoint.getExchanges(), "Should fail with 550 No such directory."); } }
SftpConsumerAutoCreateIT
java
alibaba__fastjson
src/test/java/com/alibaba/json/bvt/parser/JSONReader_top.java
{ "start": 176, "end": 647 }
class ____ extends TestCase { public void test_int() throws Exception { JSONReader reader = new JSONReader(new StringReader("123")); Assert.assertEquals(new Integer(123), reader.readInteger()); reader.close(); } public void test_long() throws Exception { JSONReader reader = new JSONReader(new StringReader("123")); Assert.assertEquals(new Long(123), reader.readLong()); reader.close(); } }
JSONReader_top
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/cfg/Configuration.java
{ "start": 25886, "end": 26208 }
class ____ be null" ); } return addResource( hbmFileName( entityClass ) ); } private static String hbmFileName(Class<?> entityClass) { return entityClass.getName().replace( '.', '/' ) + ".hbm.xml"; } /** * Read metadata from the annotations associated with this class. * * @param annotatedClass The
cannot
java
apache__camel
components/camel-cxf/camel-cxf-rest/src/test/java/org/apache/camel/component/cxf/jaxrs/CustomExceptionMapper.java
{ "start": 1007, "end": 1345 }
class ____ implements ExceptionMapper<CustomException> { @Override public Response toResponse(CustomException exception) { Response.Status status; status = Response.Status.INTERNAL_SERVER_ERROR; return Response.status(status).header("exception", exception.getMessage()).build(); } }
CustomExceptionMapper
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/limit/Oracle12LimitTest.java
{ "start": 2142, "end": 2315 }
class ____ { @Id private Long id; @OneToMany private List<Travel> travels; public Person() { } private String name; } @Entity(name = "Travel") public
Person
java
apache__flink
flink-libraries/flink-cep/src/main/java/org/apache/flink/cep/functions/adaptors/PatternTimeoutFlatSelectAdapter.java
{ "start": 3464, "end": 4052 }
class ____<T> implements Collector<T> { private final OutputTag<T> timedOutPartialMatchesTag; private transient Context ctx; private SideCollector(OutputTag<T> timedOutPartialMatchesTag) { this.timedOutPartialMatchesTag = checkNotNull(timedOutPartialMatchesTag); } public void setCtx(Context ctx) { this.ctx = ctx; } @Override public void collect(T record) { ctx.output(timedOutPartialMatchesTag, record); } @Override public void close() {} } }
SideCollector
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/inject/dagger/UseBindsTest.java
{ "start": 6559, "end": 7233 }
class ____ {", " @" + bindingMethodAnnotation, " Random provideRandom(SecureRandom impl) {", " return impl;", " }", " @" + bindingMethodAnnotation, " SecureRandom provideSecureRandom() {", " return new SecureRandom();", " }", "}") .expectUnchanged() .doTest(); } @Test public void instanceProvidesMethodWithStaticSibling() { testHelper .addInputLines( "in/Test.java", "import java.security.SecureRandom;", "import java.util.Random;", "@" + moduleAnnotation, "
Test
java
lettuce-io__lettuce-core
src/main/java/io/lettuce/core/protocol/CommandArgs.java
{ "start": 19912, "end": 20030 }
interface ____<T> { void encode(ToByteBufEncoder<T, T> encoder, T item, ByteBuf target); } }
EncodeFunction
java
grpc__grpc-java
api/src/context/java/io/grpc/PersistentHashArrayMappedTrie.java
{ "start": 9286, "end": 9441 }
interface ____<K,V> { V get(K key, int hash, int bitsConsumed); Node<K,V> put(K key, V value, int hash, int bitsConsumed); int size(); } }
Node
java
elastic__elasticsearch
server/src/test/java/org/elasticsearch/common/io/stream/DelayableWriteableTests.java
{ "start": 988, "end": 1833 }
class ____ implements NamedWriteable { private final String s; Example(String s) { this.s = s; } Example(StreamInput in) throws IOException { s = in.readString(); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(s); } @Override public String getWriteableName() { return "example"; } @Override public boolean equals(Object obj) { if (obj == null || getClass() != obj.getClass()) { return false; } Example other = (Example) obj; return s.equals(other.s); } @Override public int hashCode() { return s.hashCode(); } } private static
Example
java
apache__commons-lang
src/main/java/org/apache/commons/lang3/mutable/MutableFloat.java
{ "start": 1363, "end": 12803 }
class ____ extends Number implements Comparable<MutableFloat>, Mutable<Number> { /** * Required for serialization support. * * @see java.io.Serializable */ private static final long serialVersionUID = 5787169186L; /** The mutable value. */ private float value; /** * Constructs a new MutableFloat with the default value of zero. */ public MutableFloat() { } /** * Constructs a new MutableFloat with the specified value. * * @param value the initial value to store. */ public MutableFloat(final float value) { this.value = value; } /** * Constructs a new MutableFloat with the specified value. * * @param value the initial value to store, not null. * @throws NullPointerException if the object is null. */ public MutableFloat(final Number value) { this.value = value.floatValue(); } /** * Constructs a new MutableFloat parsing the given string. * * @param value the string to parse, not null. * @throws NumberFormatException if the string cannot be parsed into a float, see {@link Float#parseFloat(String)}. * @since 2.5 */ public MutableFloat(final String value) { this.value = Float.parseFloat(value); } /** * Adds a value to the value of this instance. * * @param operand the value to add, not null. * @since 2.2 */ public void add(final float operand) { this.value += operand; } /** * Adds a value to the value of this instance. * * @param operand the value to add, not null. * @throws NullPointerException if the object is null. * @since 2.2 */ public void add(final Number operand) { this.value += operand.floatValue(); } /** * Increments this instance's value by {@code operand}; this method returns the value associated with the instance * immediately after the addition operation. This method is not thread safe. * * @param operand the quantity to add, not null. * @return the value associated with this instance after adding the operand. 
* @since 3.5 */ public float addAndGet(final float operand) { this.value += operand; return value; } /** * Increments this instance's value by {@code operand}; this method returns the value associated with the instance * immediately after the addition operation. This method is not thread safe. * * @param operand the quantity to add, not null. * @throws NullPointerException if {@code operand} is null. * @return the value associated with this instance after adding the operand. * @since 3.5 */ public float addAndGet(final Number operand) { this.value += operand.floatValue(); return value; } /** * Compares this mutable to another in ascending order. * * @param other the other mutable to compare to, not null. * @return negative if this is less, zero if equal, positive if greater. */ @Override public int compareTo(final MutableFloat other) { return Float.compare(this.value, other.value); } /** * Decrements the value. * * @since 2.2 */ public void decrement() { value--; } /** * Decrements this instance's value by 1; this method returns the value associated with the instance * immediately after the decrement operation. This method is not thread safe. * * @return the value associated with the instance after it is decremented. * @since 3.5 */ public float decrementAndGet() { value--; return value; } /** * Returns the value of this MutableFloat as a double. * * @return the numeric value represented by this object after conversion to type double. */ @Override public double doubleValue() { return value; } /** * Compares this object against some other object. The result is {@code true} if and only if the argument is not {@code null} and is a {@link Float} object * that represents a {@code float} that has the identical bit pattern to the bit pattern of the {@code float} represented by this object. For this purpose, * two float values are considered to be the same if and only if the method {@link Float#floatToIntBits(float)}returns the same int value when applied to * each. 
* <p> * Note that in most cases, for two instances of class {@link Float},{@code f1} and {@code f2}, the value of {@code f1.equals(f2)} is {@code true} if and * only if: * </p> * <pre> * f1.floatValue() == f2.floatValue() * </pre> * <p> * also has the value {@code true}. However, there are two exceptions: * </p> * <ul> * <li>If {@code f1} and {@code f2} both represent {@code Float.NaN}, then the {@code equals} method returns {@code true}, even though * {@code Float.NaN == Float.NaN} has the value {@code false}. * <li>If {@code f1} represents {@code +0.0f} while {@code f2} represents {@code -0.0f}, or vice versa, the {@code equal} test has the value {@code false}, * even though {@code 0.0f == -0.0f} has the value {@code true}. * </ul> * <p> * This definition allows hashtables to operate properly. * </p> * * @param obj the object to compare with, null returns false. * @return {@code true} if the objects are the same; {@code false} otherwise. * @see Float#floatToIntBits(float) */ @Override public boolean equals(final Object obj) { return obj instanceof MutableFloat && Float.floatToIntBits(((MutableFloat) obj).value) == Float.floatToIntBits(value); } /** * Returns the value of this MutableFloat as a float. * * @return the numeric value represented by this object after conversion to type float. */ @Override public float floatValue() { return value; } /** * Increments this instance's value by {@code operand}; this method returns the value associated with the instance * immediately prior to the addition operation. This method is not thread safe. * * @param operand the quantity to add, not null. * @return the value associated with this instance immediately before the operand was added. * @since 3.5 */ public float getAndAdd(final float operand) { final float last = value; this.value += operand; return last; } /** * Increments this instance's value by {@code operand}; this method returns the value associated with the instance * immediately prior to the addition operation. 
This method is not thread safe. * * @param operand the quantity to add, not null. * @throws NullPointerException if {@code operand} is null. * @return the value associated with this instance immediately before the operand was added. * @since 3.5 */ public float getAndAdd(final Number operand) { final float last = value; this.value += operand.floatValue(); return last; } /** * Decrements this instance's value by 1; this method returns the value associated with the instance * immediately prior to the decrement operation. This method is not thread safe. * * @return the value associated with the instance before it was decremented. * @since 3.5 */ public float getAndDecrement() { final float last = value; value--; return last; } /** * Increments this instance's value by 1; this method returns the value associated with the instance * immediately prior to the increment operation. This method is not thread safe. * * @return the value associated with the instance before it was incremented. * @since 3.5 */ public float getAndIncrement() { final float last = value; value++; return last; } /** * Gets the value as a Float instance. * * @return the value as a Float, never null. * @deprecated Use {@link #get()}. */ @Deprecated @Override public Float getValue() { return Float.valueOf(this.value); } /** * Returns a suitable hash code for this mutable. * * @return a suitable hash code. */ @Override public int hashCode() { return Float.floatToIntBits(value); } /** * Increments the value. * * @since 2.2 */ public void increment() { value++; } /** * Increments this instance's value by 1; this method returns the value associated with the instance * immediately after the increment operation. This method is not thread safe. * * @return the value associated with the instance after it is incremented. * @since 3.5 */ public float incrementAndGet() { value++; return value; } // shortValue and byteValue rely on Number implementation /** * Returns the value of this MutableFloat as an int. 
* * @return the numeric value represented by this object after conversion to type int. */ @Override public int intValue() { return (int) value; } /** * Checks whether the float value is infinite. * * @return true if infinite */ public boolean isInfinite() { return Float.isInfinite(value); } /** * Checks whether the float value is the special NaN value. * * @return true if NaN. */ public boolean isNaN() { return Float.isNaN(value); } /** * Returns the value of this MutableFloat as a long. * * @return the numeric value represented by this object after conversion to type long. */ @Override public long longValue() { return (long) value; } /** * Sets the value. * * @param value the value to set. */ public void setValue(final float value) { this.value = value; } /** * Sets the value from any Number instance. * * @param value the value to set, not null. * @throws NullPointerException if the object is null. */ @Override public void setValue(final Number value) { this.value = value.floatValue(); } /** * Subtracts a value from the value of this instance. * * @param operand the value to subtract. * @since 2.2 */ public void subtract(final float operand) { this.value -= operand; } /** * Subtracts a value from the value of this instance. * * @param operand the value to subtract, not null. * @throws NullPointerException if the object is null. * @since 2.2 */ public void subtract(final Number operand) { this.value -= operand.floatValue(); } /** * Gets this mutable as an instance of Float. * * @return a Float instance containing the value from this mutable, never null. */ public Float toFloat() { return Float.valueOf(floatValue()); } /** * Returns the String value of this mutable. * * @return the mutable value as a string. */ @Override public String toString() { return String.valueOf(value); } }
MutableFloat
java
google__gson
test-shrinker/src/main/java/com/example/EnumClassWithSerializedName.java
{ "start": 81, "end": 186 }
enum ____ { @SerializedName("one") FIRST, @SerializedName("two") SECOND }
EnumClassWithSerializedName
java
apache__hadoop
hadoop-cloud-storage-project/hadoop-tos/src/test/java/org/apache/hadoop/fs/tosfs/TestTosChecksum.java
{ "start": 2085, "end": 4963 }
class ____ { private static final String FILE_STORE_ROOT = TempFiles.newTempDir("TestTosChecksum"); private static final String ALGORITHM_NAME = "mock-algorithm"; private static final String PREFIX = UUIDUtils.random(); private ObjectStorage objectStorage; @BeforeAll public static void beforeClass() { assumeTrue(TestEnv.checkTestEnabled()); } public void setObjectStorage(ObjectStorage objectStorage) { this.objectStorage = objectStorage; } static Stream<Arguments> provideArguments() throws URISyntaxException { List<Arguments> values = new ArrayList<>(); // Case 1: file store. Configuration fileStoreConf = new Configuration(); fileStoreConf.set(FileStoreKeys.FS_FILESTORE_CHECKSUM_ALGORITHM, ALGORITHM_NAME); fileStoreConf.set(FileStoreKeys.FS_FILESTORE_CHECKSUM_TYPE, ChecksumType.MD5.name()); fileStoreConf.set(ConfKeys.FS_OBJECT_STORAGE_ENDPOINT.key("filestore"), FILE_STORE_ROOT); URI uri0 = new URI("filestore://" + TestUtility.bucket() + "/"); values.add(Arguments.of( ChecksumType.MD5, fileStoreConf, uri0, ObjectStorageFactory.create(uri0.getScheme(), uri0.getAuthority(), fileStoreConf) )); // Case 2: tos. 
Configuration tosConf = new Configuration(); tosConf.set(TosKeys.FS_TOS_CHECKSUM_ALGORITHM, ALGORITHM_NAME); tosConf.set(TosKeys.FS_TOS_CHECKSUM_TYPE, ChecksumType.CRC32C.name()); URI uri1 = new URI(TOS_SCHEME + "://" + TestUtility.bucket() + "/"); values.add(Arguments.of( ChecksumType.CRC32C, tosConf, uri1, ObjectStorageFactory.create(uri1.getScheme(), uri1.getAuthority(), tosConf) )); return values.stream(); } @AfterEach public void tearDown() { objectStorage.deleteAll(PREFIX); } @ParameterizedTest @MethodSource("provideArguments") public void testChecksumInfo(ChecksumType type, Configuration conf, URI uri, ObjectStorage objectStore) { setObjectStorage(objectStore); assertEquals(ALGORITHM_NAME, objectStore.checksumInfo().algorithm()); assertEquals(type, objectStore.checksumInfo().checksumType()); } @ParameterizedTest @MethodSource("provideArguments") public void testFileChecksum(ChecksumType type, Configuration conf, URI uri, ObjectStorage objectStore) throws Exception { setObjectStorage(objectStore); try (RawFileSystem fs = new RawFileSystem()) { fs.initialize(uri, conf); Path file = new Path("/" + PREFIX, "testFileChecksum"); fs.create(file).close(); FileChecksum checksum = fs.getFileChecksum(file, Long.MAX_VALUE); assertEquals(ALGORITHM_NAME, checksum.getAlgorithmName()); String key = file.toString().substring(1); byte[] checksumData = objectStore.head(key).checksum(); assertArrayEquals(checksumData, checksum.getBytes()); } } }
TestTosChecksum
java
apache__hadoop
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogBackupInputStream.java
{ "start": 1796, "end": 4051 }
class ____ extends ByteArrayInputStream { ByteBufferInputStream() { super(new byte[0]); } void setData(byte[] newBytes) { super.buf = newBytes; super.count = newBytes == null ? 0 : newBytes.length; super.mark = 0; reset(); } /** * Number of bytes read from the stream so far. */ int length() { return count; } } EditLogBackupInputStream(String name) throws IOException { address = name; inner = new ByteBufferInputStream(); in = null; reader = null; } @Override public String getName() { return address; } @Override protected FSEditLogOp nextOp() throws IOException { Preconditions.checkState(reader != null, "Must call setBytes() before readOp()"); return reader.readOp(false); } @Override protected FSEditLogOp nextValidOp() { try { return reader.readOp(true); } catch (IOException e) { throw new RuntimeException("got unexpected IOException " + e, e); } } @Override public int getVersion(boolean verifyVersion) throws IOException { return this.version; } @Override public long getPosition() { return tracker.getPos(); } @Override public void close() throws IOException { in.close(); } @Override public long length() throws IOException { // file size + size of both buffers return inner.length(); } void setBytes(byte[] newBytes, int version) throws IOException { inner.setData(newBytes); tracker = new FSEditLogLoader.PositionTrackingInputStream(inner); in = new DataInputStream(tracker); this.version = version; reader = FSEditLogOp.Reader.create(in, tracker, version); } void clear() throws IOException { setBytes(null, 0); reader = null; this.version = 0; } @Override public long getFirstTxId() { return HdfsServerConstants.INVALID_TXID; } @Override public long getLastTxId() { return HdfsServerConstants.INVALID_TXID; } @Override public boolean isInProgress() { return true; } @Override public void setMaxOpSize(int maxOpSize) { reader.setMaxOpSize(maxOpSize); } @Override public boolean isLocalLog() { return true; } }
ByteBufferInputStream
java
apache__hadoop
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshot.java
{ "start": 3252, "end": 26070 }
class ____ { { GenericTestUtils.setLogLevel(INode.LOG, Level.TRACE); SnapshotTestHelper.disableLogs(); } private static final long seed; private static final Random random; static { seed = Time.now(); random = new Random(seed); System.out.println("Random seed: " + seed); } protected static final short REPLICATION = 3; protected static final int BLOCKSIZE = 1024; /** The number of times snapshots are created for a snapshottable directory */ public static final int SNAPSHOT_ITERATION_NUMBER = 20; /** Height of directory tree used for testing */ public static final int DIRECTORY_TREE_LEVEL = 5; protected Configuration conf; protected static MiniDFSCluster cluster; protected static FSNamesystem fsn; protected static FSDirectory fsdir; protected DistributedFileSystem hdfs; private static final String testDir = GenericTestUtils.getTestDir().getAbsolutePath(); /** * The list recording all previous snapshots. Each element in the array * records a snapshot root. */ protected static final ArrayList<Path> snapshotList = new ArrayList<Path>(); /** * Check {@link SnapshotTestHelper.TestDirectoryTree} */ private TestDirectoryTree dirTree; @BeforeEach public void setUp() throws Exception { conf = new Configuration(); conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, BLOCKSIZE); cluster = new MiniDFSCluster.Builder(conf).numDataNodes(REPLICATION) .build(); cluster.waitActive(); fsn = cluster.getNamesystem(); fsdir = fsn.getFSDirectory(); hdfs = cluster.getFileSystem(); dirTree = new TestDirectoryTree(DIRECTORY_TREE_LEVEL, hdfs); } @AfterEach public void tearDown() throws Exception { if (cluster != null) { cluster.shutdown(); cluster = null; } } static int modificationCount = 0; /** * Make changes (modification, deletion, creation) to the current files/dir. * Then check if the previous snapshots are still correct. * * @param modifications Modifications that to be applied to the current dir. 
*/ private void modifyCurrentDirAndCheckSnapshots(Modification[] modifications) throws Exception { for (Modification modification : modifications) { System.out.println(++modificationCount + ") " + modification); modification.loadSnapshots(); modification.modify(); modification.checkSnapshots(); } } /** * Create two snapshots in each iteration. Each time we will create a snapshot * for the top node, then randomly pick a dir in the tree and create * snapshot for it. * * Finally check the snapshots are created correctly. */ protected TestDirectoryTree.Node[] createSnapshots() throws Exception { TestDirectoryTree.Node[] nodes = new TestDirectoryTree.Node[2]; // Each time we will create a snapshot for the top level dir Path root = SnapshotTestHelper.createSnapshot(hdfs, dirTree.topNode.nodePath, nextSnapshotName()); snapshotList.add(root); nodes[0] = dirTree.topNode; SnapshotTestHelper.checkSnapshotCreation(hdfs, root, nodes[0].nodePath); // Then randomly pick one dir from the tree (cannot be the top node) and // create snapshot for it ArrayList<TestDirectoryTree.Node> excludedList = new ArrayList<TestDirectoryTree.Node>(); excludedList.add(nodes[0]); nodes[1] = dirTree.getRandomDirNode(random, excludedList); root = SnapshotTestHelper.createSnapshot(hdfs, nodes[1].nodePath, nextSnapshotName()); snapshotList.add(root); SnapshotTestHelper.checkSnapshotCreation(hdfs, root, nodes[1].nodePath); return nodes; } private File getDumpTreeFile(String dir, String suffix) { return new File(dir, String.format("dumptree_%s", suffix)); } /** * Restart the cluster to check edit log applying and fsimage saving/loading */ private void checkFSImage() throws Exception { File fsnBefore = getDumpTreeFile(testDir, "before"); File fsnMiddle = getDumpTreeFile(testDir, "middle"); File fsnAfter = getDumpTreeFile(testDir, "after"); SnapshotTestHelper.dumpTree2File(fsdir, fsnBefore); cluster.shutdown(); cluster = new MiniDFSCluster.Builder(conf).format(false) .numDataNodes(REPLICATION).build(); 
cluster.waitActive(); fsn = cluster.getNamesystem(); hdfs = cluster.getFileSystem(); // later check fsnMiddle to see if the edit log is applied correctly SnapshotTestHelper.dumpTree2File(fsdir, fsnMiddle); // save namespace and restart cluster hdfs.setSafeMode(SafeModeAction.ENTER); hdfs.saveNamespace(); hdfs.setSafeMode(SafeModeAction.LEAVE); cluster.shutdown(); cluster = new MiniDFSCluster.Builder(conf).format(false) .numDataNodes(REPLICATION).build(); cluster.waitActive(); fsn = cluster.getNamesystem(); hdfs = cluster.getFileSystem(); // dump the namespace loaded from fsimage SnapshotTestHelper.dumpTree2File(fsdir, fsnAfter); SnapshotTestHelper.compareDumpedTreeInFile(fsnBefore, fsnMiddle, true); SnapshotTestHelper.compareDumpedTreeInFile(fsnBefore, fsnAfter, true); } /** * Main test, where we will go in the following loop: * <pre> * Create snapshot and check the creation <--+ * -> Change the current/live files/dir | * -> Check previous snapshots -----------------+ * </pre> */ @Test public void testSnapshot() throws Throwable { try { runTestSnapshot(SNAPSHOT_ITERATION_NUMBER); } catch(Throwable t) { SnapshotTestHelper.LOG.info("FAILED", t); SnapshotTestHelper.dumpTree("FAILED", cluster); throw t; } } /** * Test if the OfflineImageViewerPB can correctly parse a fsimage containing * snapshots */ @Test public void testOfflineImageViewer() throws Exception { runTestSnapshot(1); // retrieve the fsimage. Note that we already save namespace to fsimage at // the end of each iteration of runTestSnapshot. 
File originalFsimage = FSImageTestUtil.findLatestImageFile( FSImageTestUtil.getFSImage( cluster.getNameNode()).getStorage().getStorageDir(0)); assertNotNull(originalFsimage, "Didn't generate or can't find fsimage"); PrintStream o = new PrintStream(NullOutputStream.INSTANCE); PBImageXmlWriter v = new PBImageXmlWriter(new Configuration(), o); v.visit(new RandomAccessFile(originalFsimage, "r")); } private void runTestSnapshot(int iteration) throws Exception { for (int i = 0; i < iteration; i++) { // create snapshot and check the creation cluster.getNamesystem().getSnapshotManager().setAllowNestedSnapshots(true); TestDirectoryTree.Node[] ssNodes = createSnapshots(); // prepare the modifications for the snapshotted dirs // we cover the following directories: top, new, and a random ArrayList<TestDirectoryTree.Node> excludedList = new ArrayList<TestDirectoryTree.Node>(); TestDirectoryTree.Node[] modNodes = new TestDirectoryTree.Node[ssNodes.length + 1]; for (int n = 0; n < ssNodes.length; n++) { modNodes[n] = ssNodes[n]; excludedList.add(ssNodes[n]); } modNodes[modNodes.length - 1] = dirTree.getRandomDirNode(random, excludedList); Modification[] mods = prepareModifications(modNodes); // make changes to the directories/files modifyCurrentDirAndCheckSnapshots(mods); // also update the metadata of directories TestDirectoryTree.Node chmodDir = dirTree.getRandomDirNode(random, null); Modification chmod = new FileChangePermission(chmodDir.nodePath, hdfs, genRandomPermission()); String[] userGroup = genRandomOwner(); TestDirectoryTree.Node chownDir = dirTree.getRandomDirNode(random, Arrays.asList(chmodDir)); Modification chown = new FileChown(chownDir.nodePath, hdfs, userGroup[0], userGroup[1]); modifyCurrentDirAndCheckSnapshots(new Modification[]{chmod, chown}); // check fsimage saving/loading checkFSImage(); } } /** * A simple test that updates a sub-directory of a snapshottable directory * with snapshots */ @Test @Timeout(value = 60) public void testUpdateDirectory() throws 
Exception { Path dir = new Path("/dir"); Path sub = new Path(dir, "sub"); Path subFile = new Path(sub, "file"); DFSTestUtil.createFile(hdfs, subFile, BLOCKSIZE, REPLICATION, seed); FileStatus oldStatus = hdfs.getFileStatus(sub); hdfs.allowSnapshot(dir); hdfs.createSnapshot(dir, "s1"); hdfs.setTimes(sub, 100L, 100L); Path snapshotPath = SnapshotTestHelper.getSnapshotPath(dir, "s1", "sub"); FileStatus snapshotStatus = hdfs.getFileStatus(snapshotPath); assertEquals(oldStatus.getModificationTime(), snapshotStatus.getModificationTime()); assertEquals(oldStatus.getAccessTime(), snapshotStatus.getAccessTime()); } /** * Test creating a snapshot with illegal name */ @Test public void testCreateSnapshotWithIllegalName() throws Exception { final Path dir = new Path("/dir"); hdfs.mkdirs(dir); final String name1 = HdfsConstants.DOT_SNAPSHOT_DIR; try { hdfs.createSnapshot(dir, name1); fail("Exception expected when an illegal name is given"); } catch (RemoteException e) { String errorMsg = "Invalid path name Invalid snapshot name: " + name1; GenericTestUtils.assertExceptionContains(errorMsg, e); } final String[] badNames = new String[] { "foo" + Path.SEPARATOR, Path.SEPARATOR + "foo", Path.SEPARATOR, "foo" + Path.SEPARATOR + "bar" }; for (String badName : badNames) { try { hdfs.createSnapshot(dir, badName); fail("Exception expected when an illegal name is given"); } catch (RemoteException e) { String errorMsg = "Invalid path name Invalid snapshot name: " + badName ; GenericTestUtils.assertExceptionContains(errorMsg, e); } } } /** * Creating snapshots for a directory that is not snapshottable must fail. 
*/ @Test @Timeout(value = 60) public void testSnapshottableDirectory() throws Exception { Path dir = new Path("/TestSnapshot/sub"); Path file0 = new Path(dir, "file0"); Path file1 = new Path(dir, "file1"); DFSTestUtil.createFile(hdfs, file0, BLOCKSIZE, REPLICATION, seed); DFSTestUtil.createFile(hdfs, file1, BLOCKSIZE, REPLICATION, seed); try { hdfs.createSnapshot(dir, "s1"); fail("Exception expected: " + dir + " is not snapshottable"); } catch (IOException e) { GenericTestUtils.assertExceptionContains( "Directory is not a snapshottable directory: " + dir, e); } try { hdfs.deleteSnapshot(dir, "s1"); fail("Exception expected: " + dir + " is not a snapshottale dir"); } catch (Exception e) { GenericTestUtils.assertExceptionContains( "Directory is not a snapshottable directory: " + dir, e); } try { hdfs.renameSnapshot(dir, "s1", "s2"); fail("Exception expected: " + dir + " is not a snapshottale dir"); } catch (Exception e) { GenericTestUtils.assertExceptionContains( "Directory is not a snapshottable directory: " + dir, e); } } /** * Test multiple calls of allowSnapshot and disallowSnapshot, to make sure * they are idempotent */ @Test public void testAllowAndDisallowSnapshot() throws Exception { final Path dir = new Path("/dir"); final Path file0 = new Path(dir, "file0"); final Path file1 = new Path(dir, "file1"); DFSTestUtil.createFile(hdfs, file0, BLOCKSIZE, REPLICATION, seed); DFSTestUtil.createFile(hdfs, file1, BLOCKSIZE, REPLICATION, seed); INodeDirectory dirNode = fsdir.getINode4Write(dir.toString()).asDirectory(); assertFalse(dirNode.isSnapshottable()); hdfs.allowSnapshot(dir); dirNode = fsdir.getINode4Write(dir.toString()).asDirectory(); assertTrue(dirNode.isSnapshottable()); // call allowSnapshot again hdfs.allowSnapshot(dir); dirNode = fsdir.getINode4Write(dir.toString()).asDirectory(); assertTrue(dirNode.isSnapshottable()); // disallowSnapshot on dir hdfs.disallowSnapshot(dir); dirNode = fsdir.getINode4Write(dir.toString()).asDirectory(); 
assertFalse(dirNode.isSnapshottable()); // do it again hdfs.disallowSnapshot(dir); dirNode = fsdir.getINode4Write(dir.toString()).asDirectory(); assertFalse(dirNode.isSnapshottable()); // same process on root final Path root = new Path("/"); INodeDirectory rootNode = fsdir.getINode4Write(root.toString()) .asDirectory(); assertTrue(rootNode.isSnapshottable()); // root is snapshottable dir, but with 0 snapshot quota assertEquals(0, rootNode.getDirectorySnapshottableFeature() .getSnapshotQuota()); hdfs.allowSnapshot(root); rootNode = fsdir.getINode4Write(root.toString()).asDirectory(); assertTrue(rootNode.isSnapshottable()); assertEquals(DirectorySnapshottableFeature.SNAPSHOT_QUOTA_DEFAULT, rootNode.getDirectorySnapshottableFeature().getSnapshotQuota()); // call allowSnapshot again hdfs.allowSnapshot(root); rootNode = fsdir.getINode4Write(root.toString()).asDirectory(); assertTrue(rootNode.isSnapshottable()); assertEquals(DirectorySnapshottableFeature.SNAPSHOT_QUOTA_DEFAULT, rootNode.getDirectorySnapshottableFeature().getSnapshotQuota()); // disallowSnapshot on dir hdfs.disallowSnapshot(root); rootNode = fsdir.getINode4Write(root.toString()).asDirectory(); assertTrue(rootNode.isSnapshottable()); assertEquals(0, rootNode.getDirectorySnapshottableFeature().getSnapshotQuota()); // do it again hdfs.disallowSnapshot(root); rootNode = fsdir.getINode4Write(root.toString()).asDirectory(); assertTrue(rootNode.isSnapshottable()); assertEquals(0, rootNode.getDirectorySnapshottableFeature().getSnapshotQuota()); } @Test @Timeout(value = 60) public void testSnapshotMtime() throws Exception { Path dir = new Path("/dir"); Path sub = new Path(dir, "sub"); Path subFile = new Path(sub, "file"); DFSTestUtil.createFile(hdfs, subFile, BLOCKSIZE, REPLICATION, seed); hdfs.allowSnapshot(dir); Path snapshotPath = hdfs.createSnapshot(dir, "s1"); FileStatus oldSnapshotStatus = hdfs.getFileStatus(snapshotPath); cluster.restartNameNodes(); FileStatus newSnapshotStatus = 
hdfs.getFileStatus(snapshotPath); assertEquals(oldSnapshotStatus.getModificationTime(), newSnapshotStatus.getModificationTime()); } @Test @Timeout(value = 60) public void testRenameSnapshotMtime() throws Exception { Path dir = new Path("/dir"); Path sub = new Path(dir, "sub"); Path subFile = new Path(sub, "file"); DFSTestUtil.createFile(hdfs, subFile, BLOCKSIZE, REPLICATION, seed); hdfs.allowSnapshot(dir); Path snapshotPath = hdfs.createSnapshot(dir, "s1"); FileStatus oldSnapshotStatus = hdfs.getFileStatus(snapshotPath); hdfs.renameSnapshot(dir, "s1", "s2"); Path snapshotRenamePath = new Path("/dir/.snapshot/s2"); FileStatus newSnapshotStatus = hdfs.getFileStatus(snapshotRenamePath); assertNotEquals(oldSnapshotStatus.getModificationTime(), newSnapshotStatus.getModificationTime()); } /** * Test snapshot directory mtime after snapshot deletion. */ @Test @Timeout(value = 60) public void testDeletionSnapshotMtime() throws Exception { Path dir = new Path("/dir"); Path sub = new Path(dir, "sub"); Path subFile = new Path(sub, "file"); DFSTestUtil.createFile(hdfs, subFile, BLOCKSIZE, REPLICATION, seed); hdfs.allowSnapshot(dir); Path snapshotPath = hdfs.createSnapshot(dir, "s1"); FileStatus oldSnapshotStatus = hdfs.getFileStatus(snapshotPath); hdfs.deleteSnapshot(dir, "s1"); FileStatus dirStatus = hdfs.getFileStatus(dir); assertNotEquals(dirStatus.getModificationTime(), oldSnapshotStatus.getModificationTime()); cluster.restartNameNodes(); FileStatus newSnapshotStatus = hdfs.getFileStatus(dir); assertEquals(dirStatus.getModificationTime(), newSnapshotStatus.getModificationTime()); } /** * HDFS-15446 - ensure that snapshot operations on /.reserved/raw * paths work and the NN can load the resulting edits. 
*/ @Test @Timeout(value = 60) public void testSnapshotOpsOnReservedPath() throws Exception { Path dir = new Path("/dir"); Path nestedDir = new Path("/nested/dir"); Path sub = new Path(dir, "sub"); Path subFile = new Path(sub, "file"); Path nestedFile = new Path(nestedDir, "file"); DFSTestUtil.createFile(hdfs, subFile, BLOCKSIZE, REPLICATION, seed); DFSTestUtil.createFile(hdfs, nestedFile, BLOCKSIZE, REPLICATION, seed); hdfs.allowSnapshot(dir); hdfs.allowSnapshot(nestedDir); Path reservedDir = new Path("/.reserved/raw/dir"); Path reservedNestedDir = new Path("/.reserved/raw/nested/dir"); hdfs.createSnapshot(reservedDir, "s1"); hdfs.createSnapshot(reservedNestedDir, "s1"); hdfs.renameSnapshot(reservedDir, "s1", "s2"); hdfs.renameSnapshot(reservedNestedDir, "s1", "s2"); hdfs.deleteSnapshot(reservedDir, "s2"); hdfs.deleteSnapshot(reservedNestedDir, "s2"); // The original problem with reserved path, is that the NN was unable to // replay the edits, therefore restarting the NN to ensure it starts // and no exceptions are raised. cluster.restartNameNode(true); } /** * HDFS-15446 - ensure that snapshot operations on /.reserved/raw * paths work and the NN can load the resulting edits. This test if for * snapshots at the root level. */ @Test @Timeout(value = 120) public void testSnapshotOpsOnRootReservedPath() throws Exception { Path dir = new Path("/"); Path sub = new Path(dir, "sub"); Path subFile = new Path(sub, "file"); DFSTestUtil.createFile(hdfs, subFile, BLOCKSIZE, REPLICATION, seed); hdfs.allowSnapshot(dir); Path reservedDir = new Path("/.reserved/raw"); hdfs.createSnapshot(reservedDir, "s1"); hdfs.renameSnapshot(reservedDir, "s1", "s2"); hdfs.deleteSnapshot(reservedDir, "s2"); // The original problem with reserved path, is that the NN was unable to // replay the edits, therefore restarting the NN to ensure it starts // and no exceptions are raised. cluster.restartNameNode(true); } /** * Prepare a list of modifications. 
A modification may be a file creation, * file deletion, or a modification operation such as appending to an existing * file. */ private Modification[] prepareModifications(TestDirectoryTree.Node[] nodes) throws Exception { ArrayList<Modification> mList = new ArrayList<Modification>(); for (TestDirectoryTree.Node node : nodes) { // If the node does not have files in it, create files if (node.fileList == null) { node.initFileList(hdfs, node.nodePath.getName(), BLOCKSIZE, REPLICATION, seed, 6); } // // Modification iterations are as follows: // Iteration 0 - create:fileList[5], delete:fileList[0], // append:fileList[1], chmod:fileList[2], // chown:fileList[3], change_replication:fileList[4]. // Set nullFileIndex to 0 // // Iteration 1 - create:fileList[0], delete:fileList[1], // append:fileList[2], chmod:fileList[3], // chown:fileList[4], change_replication:fileList[5] // Set nullFileIndex to 1 // // Iteration 2 - create:fileList[1], delete:fileList[2], // append:fileList[3], chmod:fileList[4], // chown:fileList[5], change_replication:fileList[6] // Set nullFileIndex to 2 // ... 
// Modification create = new FileCreation( node.fileList.get(node.nullFileIndex), hdfs, BLOCKSIZE); Modification delete = new FileDeletion( node.fileList.get((node.nullFileIndex + 1) % node.fileList.size()), hdfs); Path f = node.fileList.get((node.nullFileIndex + 2) % node.fileList.size()); Modification append = new FileAppend(f, hdfs, BLOCKSIZE); FileAppendNotClose appendNotClose = new FileAppendNotClose(f, hdfs, BLOCKSIZE); Modification appendClose = new FileAppendClose(f, hdfs, BLOCKSIZE, appendNotClose); Modification chmod = new FileChangePermission( node.fileList.get((node.nullFileIndex + 3) % node.fileList.size()), hdfs, genRandomPermission()); String[] userGroup = genRandomOwner(); Modification chown = new FileChown( node.fileList.get((node.nullFileIndex + 4) % node.fileList.size()), hdfs, userGroup[0], userGroup[1]); Modification replication = new FileChangeReplication( node.fileList.get((node.nullFileIndex + 5) % node.fileList.size()), hdfs, (short) (random.nextInt(REPLICATION) + 1)); node.nullFileIndex = (node.nullFileIndex + 1) % node.fileList.size(); Modification dirChange = new DirCreationOrDeletion(node.nodePath, hdfs, node, random.nextBoolean()); // dir rename Node dstParent = dirTree.getRandomDirNode(random, Arrays.asList(nodes)); Modification dirRename = new DirRename(node.nodePath, hdfs, node, dstParent); mList.add(create); mList.add(delete); mList.add(append); mList.add(appendNotClose); mList.add(appendClose); mList.add(chmod); mList.add(chown); mList.add(replication); mList.add(dirChange); mList.add(dirRename); } return mList.toArray(new Modification[mList.size()]); } /** * @return A random FsPermission */ private FsPermission genRandomPermission() { // randomly select between "rwx" and "rw-" FsAction u = random.nextBoolean() ? FsAction.ALL : FsAction.READ_WRITE; FsAction g = random.nextBoolean() ? FsAction.ALL : FsAction.READ_WRITE; FsAction o = random.nextBoolean() ? 
FsAction.ALL : FsAction.READ_WRITE; return new FsPermission(u, g, o); } /** * @return A string array containing two string: the first string indicates * the owner, and the other indicates the group */ private String[] genRandomOwner() { String[] userGroup = new String[]{"dr.who", "unknown"}; return userGroup; } private static int snapshotCount = 0; /** @return The next snapshot name */ static String nextSnapshotName() { return String.format("s-%d", ++snapshotCount); } /** * Base
TestSnapshot
java
spring-projects__spring-boot
documentation/spring-boot-actuator-docs/src/test/java/org/springframework/boot/actuate/docs/AbstractEndpointDocumentationTests.java
{ "start": 3089, "end": 3348 }
class ____ tests that generate endpoint documentation using Spring REST * Docs. * * @author Andy Wilkinson */ @TestPropertySource(properties = { "management.endpoints.web.exposure.include=*" }) @Import(BaseDocumentationConfiguration.class) public abstract
for
java
elastic__elasticsearch
modules/repository-s3/qa/web-identity-token/src/test/java/org/elasticsearch/repositories/s3/CustomWebIdentityTokenCredentialsProviderTests.java
{ "start": 8369, "end": 10971 }
class ____ implements Consumer<String> { private Consumer<String> delegate; private DelegatingConsumer(Consumer<String> delegate) { this.delegate = delegate; } private void setDelegate(Consumer<String> delegate) { this.delegate = delegate; } @Override public void accept(String s) { delegate.accept(s); } } @SuppressForbidden(reason = "HTTP server is used for testing") public void testPickUpNewWebIdentityTokenWhenItsChanged() throws Exception { DelegatingConsumer webIdentityTokenCheck = new DelegatingConsumer(s -> assertEquals("YXdzLXdlYi1pZGVudGl0eS10b2tlbi1maWxl", s)); HttpServer httpServer = getHttpServer(webIdentityTokenCheck); Environment environment = getEnvironment(); Map<String, String> environmentVariables = environmentVariables(); Map<String, String> systemProperties = getSystemProperties(httpServer); var webIdentityTokenCredentialsProvider = new S3Service.CustomWebIdentityTokenCredentialsProvider( environment, environmentVariables::get, systemProperties::getOrDefault, Clock.fixed(Instant.ofEpochMilli(1651084775908L), ZoneOffset.UTC), resourceWatcherService ); try { AwsCredentialsProvider awsCredentialsProvider = S3Service.buildCredentials( LogManager.getLogger(S3Service.class), S3ClientSettings.getClientSettings(Settings.EMPTY, randomAlphaOfLength(8)), webIdentityTokenCredentialsProvider ); assertCredentials(awsCredentialsProvider.resolveCredentials()); var latch = new CountDownLatch(1); String newWebIdentityToken = "88f84342080d4671a511e10ae905b2b0"; webIdentityTokenCheck.setDelegate(s -> { if (s.equals(newWebIdentityToken)) { latch.countDown(); } }); Files.writeString(environment.configDir().resolve("repository-s3/aws-web-identity-token-file"), newWebIdentityToken); do { // re-resolve credentials in order to trigger a refresh assertCredentials(awsCredentialsProvider.resolveCredentials()); } while (latch.await(500, TimeUnit.MILLISECONDS) == false); assertCredentials(awsCredentialsProvider.resolveCredentials()); } finally { 
webIdentityTokenCredentialsProvider.close(); httpServer.stop(0); } } }
DelegatingConsumer
java
apache__hadoop
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java
{ "start": 87156, "end": 89435 }
class ____ implements MultipleArcTransition<TaskAttemptImpl, TaskAttemptEvent, TaskAttemptStateInternal> { @Override public TaskAttemptStateInternal transition(TaskAttemptImpl taskAttempt, TaskAttemptEvent event) { TaskAttemptRecoverEvent tare = (TaskAttemptRecoverEvent) event; return taskAttempt.recover(tare.getTaskAttemptInfo(), tare.getCommitter(), tare.getRecoverOutput()); } } @SuppressWarnings({ "unchecked" }) private void logAttemptFinishedEvent(TaskAttemptStateInternal state) { //Log finished events only if an attempt started. if (getLaunchTime() == 0) return; String containerHostName = this.container == null ? "UNKNOWN" : this.container.getNodeId().getHost(); int containerNodePort = this.container == null ? -1 : this.container.getNodeId().getPort(); if (attemptId.getTaskId().getTaskType() == TaskType.MAP) { MapAttemptFinishedEvent mfe = new MapAttemptFinishedEvent(TypeConverter.fromYarn(attemptId), TypeConverter.fromYarn(attemptId.getTaskId().getTaskType()), state.toString(), this.reportedStatus.mapFinishTime, finishTime, containerHostName, containerNodePort, this.nodeRackName == null ? "UNKNOWN" : this.nodeRackName, this.reportedStatus.stateString, getCounters(), getProgressSplitBlock().burst(), launchTime); eventHandler.handle( new JobHistoryEvent(attemptId.getTaskId().getJobId(), mfe)); } else { ReduceAttemptFinishedEvent rfe = new ReduceAttemptFinishedEvent(TypeConverter.fromYarn(attemptId), TypeConverter.fromYarn(attemptId.getTaskId().getTaskType()), state.toString(), this.reportedStatus.shuffleFinishTime, this.reportedStatus.sortFinishTime, finishTime, containerHostName, containerNodePort, this.nodeRackName == null ? "UNKNOWN" : this.nodeRackName, this.reportedStatus.stateString, getCounters(), getProgressSplitBlock().burst(), launchTime); eventHandler.handle( new JobHistoryEvent(attemptId.getTaskId().getJobId(), rfe)); } } private static
RecoverTransition
java
mybatis__mybatis-3
src/test/java/org/apache/ibatis/submitted/uuid_test/Mapper.java
{ "start": 797, "end": 965 }
interface ____ { User getUser(UUID id); void insertUser(User user); @Insert("${sql}") int insertDynamicUser(String sql, Map<String, Object> parameters); }
Mapper
java
apache__flink
flink-connectors/flink-connector-files/src/main/java/org/apache/flink/connector/file/table/FileSystemConnectorOptions.java
{ "start": 17967, "end": 19245 }
enum ____ implements DescribedEnum { PROCESS_TIME( "process-time", text( "Based on the time of the machine, requires neither partition time extraction nor watermark generation. " + "Commits partition once the current system time passes partition creation system time plus delay.")), PARTITION_TIME( "partition-time", text( "Based on the time extracted from partition values, requires watermark generation. " + "Commits partition once the watermark passes the time extracted from partition values plus delay.")); private final String value; private final InlineElement description; PartitionCommitTriggerType(String value, InlineElement description) { this.value = value; this.description = description; } @Override public String toString() { return value; } @Override public InlineElement getDescription() { return description; } } /** Statistics types for file system, see {@link #SOURCE_REPORT_STATISTICS}. */ public
PartitionCommitTriggerType
java
apache__rocketmq
client/src/main/java/org/apache/rocketmq/client/consumer/MQPullConsumerScheduleService.java
{ "start": 5858, "end": 6595 }
class ____ implements MessageQueueListener { @Override public void messageQueueChanged(String topic, Set<MessageQueue> mqAll, Set<MessageQueue> mqDivided) { MessageModel messageModel = MQPullConsumerScheduleService.this.defaultMQPullConsumer.getMessageModel(); switch (messageModel) { case BROADCASTING: MQPullConsumerScheduleService.this.putTask(topic, mqAll); break; case CLUSTERING: MQPullConsumerScheduleService.this.putTask(topic, mqDivided); break; default: break; } } } public
MessageQueueListenerImpl
java
quarkusio__quarkus
extensions/oidc-common/runtime/src/main/java/io/quarkus/oidc/common/runtime/config/OidcCommonConfigBuilder.java
{ "start": 1365, "end": 9194 }
class ____ implements OidcCommonConfig { private final Optional<String> authServerUrl; private final Optional<Boolean> discoveryEnabled; private final Optional<String> registrationPath; private final Optional<Duration> connectionDelay; private final int connectionRetryCount; private final Duration connectionTimeout; private final boolean useBlockingDnsLookup; private final OptionalInt maxPoolSize; private final boolean followRedirects; private final Proxy proxy; private final Tls tls; protected OidcCommonConfigImpl(OidcCommonConfigBuilder<?> builder) { this.authServerUrl = builder.authServerUrl; this.discoveryEnabled = builder.discoveryEnabled; this.registrationPath = builder.registrationPath; this.connectionDelay = builder.connectionDelay; this.connectionRetryCount = builder.connectionRetryCount; this.connectionTimeout = builder.connectionTimeout; this.useBlockingDnsLookup = builder.useBlockingDnsLookup; this.maxPoolSize = builder.maxPoolSize; this.followRedirects = builder.followRedirects; this.proxy = new ProxyImpl(builder.proxyHost, builder.proxyPort, builder.proxyUsername, builder.proxyPassword); this.tls = builder.tls; } @Override public Optional<String> authServerUrl() { return authServerUrl; } @Override public Optional<Boolean> discoveryEnabled() { return discoveryEnabled; } @Override public Optional<String> registrationPath() { return registrationPath; } @Override public Optional<Duration> connectionDelay() { return connectionDelay; } @Override public int connectionRetryCount() { return connectionRetryCount; } @Override public Duration connectionTimeout() { return connectionTimeout; } @Override public boolean useBlockingDnsLookup() { return useBlockingDnsLookup; } @Override public OptionalInt maxPoolSize() { return maxPoolSize; } @Override public boolean followRedirects() { return followRedirects; } @Override public Proxy proxy() { return proxy; } @Override public Tls tls() { return tls; } } private Optional<String> authServerUrl; private Optional<Boolean> 
discoveryEnabled; private Optional<String> registrationPath; private Optional<Duration> connectionDelay; private int connectionRetryCount; private Duration connectionTimeout; private boolean useBlockingDnsLookup; private OptionalInt maxPoolSize; private boolean followRedirects; private Optional<String> proxyHost; private int proxyPort; private Optional<String> proxyUsername; private Optional<String> proxyPassword; private OidcCommonConfig.Tls tls; protected OidcCommonConfigBuilder(OidcCommonConfig oidcCommonConfig) { this.authServerUrl = oidcCommonConfig.authServerUrl(); this.discoveryEnabled = oidcCommonConfig.discoveryEnabled(); this.registrationPath = oidcCommonConfig.registrationPath(); this.connectionDelay = oidcCommonConfig.connectionDelay(); this.connectionRetryCount = oidcCommonConfig.connectionRetryCount(); this.connectionTimeout = oidcCommonConfig.connectionTimeout(); this.useBlockingDnsLookup = oidcCommonConfig.useBlockingDnsLookup(); this.maxPoolSize = oidcCommonConfig.maxPoolSize(); this.followRedirects = oidcCommonConfig.followRedirects(); this.proxyHost = oidcCommonConfig.proxy().host(); this.proxyPort = oidcCommonConfig.proxy().port(); this.proxyUsername = oidcCommonConfig.proxy().username(); this.proxyPassword = oidcCommonConfig.proxy().password(); this.tls = oidcCommonConfig.tls(); } protected abstract T getBuilder(); /** * @param authServerUrl {@link OidcCommonConfig#authServerUrl()} * @return T builder */ public T authServerUrl(String authServerUrl) { this.authServerUrl = Optional.ofNullable(authServerUrl); return getBuilder(); } /** * @param discoveryEnabled {@link OidcCommonConfig#discoveryEnabled()} * @return T builder */ public T discoveryEnabled(boolean discoveryEnabled) { this.discoveryEnabled = Optional.of(discoveryEnabled); return getBuilder(); } /** * @param registrationPath {@link OidcCommonConfig#registrationPath()} * @return T builder */ public T registrationPath(String registrationPath) { this.registrationPath = 
Optional.ofNullable(registrationPath); return getBuilder(); } /** * @param connectionDelay {@link OidcCommonConfig#connectionDelay()} * @return T builder */ public T connectionDelay(Duration connectionDelay) { this.connectionDelay = Optional.ofNullable(connectionDelay); return getBuilder(); } /** * @param connectionRetryCount {@link OidcCommonConfig#connectionRetryCount()} * @return T builder */ public T connectionRetryCount(int connectionRetryCount) { this.connectionRetryCount = connectionRetryCount; return getBuilder(); } /** * @param connectionTimeout {@link OidcCommonConfig#connectionTimeout()} * @return T builder */ public T connectionTimeout(Duration connectionTimeout) { this.connectionTimeout = connectionTimeout; return getBuilder(); } /** * @param useBlockingDnsLookup {@link OidcCommonConfig#useBlockingDnsLookup()} * @return T builder */ public T useBlockingDnsLookup(boolean useBlockingDnsLookup) { this.useBlockingDnsLookup = useBlockingDnsLookup; return getBuilder(); } /** * @param maxPoolSize {@link OidcCommonConfig#maxPoolSize()} * @return T builder */ public T maxPoolSize(int maxPoolSize) { this.maxPoolSize = OptionalInt.of(maxPoolSize); return getBuilder(); } /** * @param followRedirects {@link OidcCommonConfig#followRedirects()} * @return T builder */ public T followRedirects(boolean followRedirects) { this.followRedirects = followRedirects; return getBuilder(); } /** * @param host {@link OidcCommonConfig.Proxy#host()} * @param port {@link OidcCommonConfig.Proxy#port()} * @return T builder */ public T proxy(String host, int port) { this.proxyHost = Optional.ofNullable(host); this.proxyPort = port; return getBuilder(); } /** * @param host {@link OidcCommonConfig.Proxy#host()} * @param port {@link OidcCommonConfig.Proxy#port()} * @param username {@link OidcCommonConfig.Proxy#username()} * @param password {@link OidcCommonConfig.Proxy#password()} * @return T builder */ public T proxy(String host, int port, String username, String password) { 
this.proxyHost = Optional.ofNullable(host); this.proxyPort = port; this.proxyUsername = Optional.ofNullable(username); this.proxyPassword = Optional.ofNullable(password); return getBuilder(); } /** * @param tlsConfigurationName {@link OidcCommonConfig.Tls#tlsConfigurationName()} * @return T builder */ public T tlsConfigurationName(String tlsConfigurationName) { this.tls = new TlsImpl(tlsConfigurationName); return getBuilder(); } }
OidcCommonConfigImpl
java
google__dagger
javatests/dagger/internal/codegen/BindsMethodValidationTest.java
{ "start": 14441, "end": 14923 }
interface ____ {}"); CompilerTests.daggerCompiler(component, module, qualifier, k, v) .compile( subject -> { subject.hasErrorCount(1); subject.hasErrorContaining("Map<test.K,Provider<test.V>> cannot be provided"); }); } private DaggerModuleMethodSubject assertThatMethod(String method) { return assertThatModuleMethod(method).withDeclaration(moduleDeclaration); } @Qualifier @Retention(RUNTIME) public @
V
java
greenrobot__EventBus
EventBusTest/src/org/greenrobot/eventbus/indexed/EventBusSubscriberInJarTestWithIndex.java
{ "start": 866, "end": 1096 }
class ____ extends EventBusSubscriberInJarTest { @Before public void overwriteEventBus() throws Exception { eventBus = EventBus.builder().addIndex(new InJarIndex()).build(); } }
EventBusSubscriberInJarTestWithIndex
java
redisson__redisson
redisson/src/main/java/org/redisson/client/BaseRedisPubSubListener.java
{ "start": 745, "end": 1109 }
class ____ implements RedisPubSubListener<Object> { @Override public void onStatus(PubSubType type, CharSequence channel) { } @Override public void onMessage(CharSequence channel, Object message) { } @Override public void onPatternMessage(CharSequence pattern, CharSequence channel, Object message) { } }
BaseRedisPubSubListener
java
apache__camel
components/camel-thrift/src/test/java/org/apache/camel/component/thrift/generated/Calculator.java
{ "start": 148139, "end": 148421 }
class ____ implements org.apache.thrift.scheme.SchemeFactory { @Override public calculate_resultTupleScheme getScheme() { return new calculate_resultTupleScheme(); } } private static
calculate_resultTupleSchemeFactory
java
spring-projects__spring-framework
spring-beans/src/test/java/org/springframework/beans/BeanWrapperGenericsTests.java
{ "start": 22519, "end": 22589 }
class ____ extends HashMap<Integer, List<Long>> { } public
DerivedMap
java
elastic__elasticsearch
x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormat.java
{ "start": 6654, "end": 8393 }
class ____ extends DocValuesFormat { /** Sole Constructor */ public Lucene70DocValuesFormat() { super("Lucene70"); } @Override public DocValuesConsumer fieldsConsumer(SegmentWriteState state) throws IOException { return new Lucene70DocValuesConsumer(state, DATA_CODEC, DATA_EXTENSION, META_CODEC, META_EXTENSION); } @Override public DocValuesProducer fieldsProducer(SegmentReadState state) throws IOException { return new Lucene70DocValuesProducer(state, DATA_CODEC, DATA_EXTENSION, META_CODEC, META_EXTENSION); } static final String DATA_CODEC = "Lucene70DocValuesData"; static final String DATA_EXTENSION = "dvd"; static final String META_CODEC = "Lucene70DocValuesMetadata"; static final String META_EXTENSION = "dvm"; static final int VERSION_START = 0; static final int VERSION_CURRENT = VERSION_START; // indicates docvalues type static final byte NUMERIC = 0; static final byte BINARY = 1; static final byte SORTED = 2; static final byte SORTED_SET = 3; static final byte SORTED_NUMERIC = 4; static final int DIRECT_MONOTONIC_BLOCK_SHIFT = 16; static final int NUMERIC_BLOCK_SHIFT = 14; static final int NUMERIC_BLOCK_SIZE = 1 << NUMERIC_BLOCK_SHIFT; static final int TERMS_DICT_BLOCK_SHIFT = 4; static final int TERMS_DICT_BLOCK_SIZE = 1 << TERMS_DICT_BLOCK_SHIFT; static final int TERMS_DICT_BLOCK_MASK = TERMS_DICT_BLOCK_SIZE - 1; static final int TERMS_DICT_REVERSE_INDEX_SHIFT = 10; static final int TERMS_DICT_REVERSE_INDEX_SIZE = 1 << TERMS_DICT_REVERSE_INDEX_SHIFT; static final int TERMS_DICT_REVERSE_INDEX_MASK = TERMS_DICT_REVERSE_INDEX_SIZE - 1; }
Lucene70DocValuesFormat
java
spring-projects__spring-framework
spring-context/src/test/java/org/springframework/context/aot/ApplicationContextAotGeneratorTests.java
{ "start": 38687, "end": 42184 }
class ____ { @Test void failureProcessingBeanFactoryAotContribution() { GenericApplicationContext applicationContext = new GenericApplicationContext(); applicationContext.registerBeanDefinition("test", new RootBeanDefinition(FailingBeanFactoryInitializationAotContribution.class)); assertThatExceptionOfType(AotProcessingException.class) .isThrownBy(() -> processAheadOfTime(applicationContext)) .withMessageStartingWith("Error executing '") .withMessageContaining(FailingBeanFactoryInitializationAotContribution.class.getName()) .withMessageContaining("Test exception"); } } private static void registerBeanPostProcessor(GenericApplicationContext applicationContext, String beanName, Class<?> beanPostProcessorClass) { applicationContext.registerBeanDefinition(beanName, BeanDefinitionBuilder .rootBeanDefinition(beanPostProcessorClass).setRole(BeanDefinition.ROLE_INFRASTRUCTURE) .getBeanDefinition()); } private static void registerStringBean(GenericApplicationContext applicationContext, String beanName, String value) { applicationContext.registerBeanDefinition(beanName, BeanDefinitionBuilder .rootBeanDefinition(String.class).addConstructorArgValue(value) .getBeanDefinition()); } private static void registerIntegerBean(GenericApplicationContext applicationContext, String beanName, int value) { applicationContext.registerBeanDefinition(beanName, BeanDefinitionBuilder .rootBeanDefinition(Integer.class, "valueOf").addConstructorArgValue(value) .getBeanDefinition()); } private static Consumer<List<? 
extends JdkProxyHint>> doesNotHaveProxyFor(Class<?> target) { return hints -> assertThat(hints).noneMatch(hint -> hint.getProxiedInterfaces().get(0).equals(TypeReference.of(target))); } private static TestGenerationContext processAheadOfTime(GenericApplicationContext applicationContext) { ApplicationContextAotGenerator generator = new ApplicationContextAotGenerator(); TestGenerationContext generationContext = new TestGenerationContext(); generator.processAheadOfTime(applicationContext, generationContext); generationContext.writeGeneratedContent(); return generationContext; } private static void testCompiledResult(GenericApplicationContext applicationContext, BiConsumer<ApplicationContextInitializer<GenericApplicationContext>, Compiled> result) { testCompiledResult(processAheadOfTime(applicationContext), result); } @SuppressWarnings("unchecked") private static void testCompiledResult(TestGenerationContext generationContext, BiConsumer<ApplicationContextInitializer<GenericApplicationContext>, Compiled> result) { TestCompiler.forSystem().with(generationContext).compile(compiled -> result.accept(compiled.getInstance(ApplicationContextInitializer.class), compiled)); } private static GenericApplicationContext toFreshApplicationContext( ApplicationContextInitializer<GenericApplicationContext> initializer) { GenericApplicationContext freshApplicationContext = createFreshApplicationContext(initializer); freshApplicationContext.refresh(); return freshApplicationContext; } private static GenericApplicationContext createFreshApplicationContext( ApplicationContextInitializer<GenericApplicationContext> initializer) { GenericApplicationContext freshApplicationContext = new GenericApplicationContext(); initializer.initialize(freshApplicationContext); return freshApplicationContext; } static
ExceptionHandling
java
apache__spark
sql/core/src/test/java/test/org/apache/spark/sql/connector/JavaAdvancedDataSourceV2.java
{ "start": 1376, "end": 1712 }
class ____ implements TestingV2Source { @Override public Table getTable(CaseInsensitiveStringMap options) { return new JavaSimpleBatchTable() { @Override public ScanBuilder newScanBuilder(CaseInsensitiveStringMap options) { return new AdvancedScanBuilder(); } }; } static
JavaAdvancedDataSourceV2
java
ReactiveX__RxJava
src/test/java/io/reactivex/rxjava3/validators/ParamValidationCheckerTest.java
{ "start": 67112, "end": 67287 }
class ____ extends Observable<Object> { @Override public void subscribeActual(Observer<? super Object> observer) { // not invoked, the
NeverObservable
java
spring-projects__spring-framework
spring-core/src/main/java/org/springframework/core/env/ProfilesParser.java
{ "start": 4493, "end": 4554 }
enum ____ { NONE, NEGATE, PARENTHESIS } private static
Context
java
apache__hadoop
hadoop-cloud-storage-project/hadoop-tos/src/test/java/org/apache/hadoop/fs/tosfs/object/TestDirectoryStorage.java
{ "start": 1789, "end": 7441 }
class ____ { private final ObjectStorage storage; public TestDirectoryStorage() { Configuration conf = new Configuration(); storage = ObjectStorageFactory.createWithPrefix(String.format("%s-%s/", scheme(), UUIDUtils.random()), scheme(), TestUtility.bucket(), conf); } @BeforeAll public static void before() { assumeTrue(TestEnv.checkTestEnabled()); } @AfterEach public void tearDown() { CommonUtils.runQuietly(() -> storage.deleteAll("")); for (MultipartUpload upload : storage.listUploads("")) { storage.abortMultipartUpload(upload.key(), upload.uploadId()); } } @Test public void testListEmptyDir() { String key = "testListEmptyDir/"; mkdir(key); assertNotNull(directoryStorage().head(key)); assertFalse(directoryStorage().listDir(key, false).iterator().hasNext()); assertFalse(directoryStorage().listDir(key, false).iterator().hasNext()); assertTrue(directoryStorage().isEmptyDir(key)); } @Test public void testListNonExistDir() { String key = "testListNonExistDir/"; assertNull(directoryStorage().head(key)); assertFalse(directoryStorage().listDir(key, false).iterator().hasNext()); assertFalse(directoryStorage().listDir(key, false).iterator().hasNext()); assertTrue(directoryStorage().isEmptyDir(key)); } @Test public void testRecursiveList() { String root = "root/"; String file1 = "root/file1"; String file2 = "root/afile2"; String dir1 = "root/dir1/"; String file3 = "root/dir1/file3"; mkdir(root); mkdir(dir1); touchFile(file1, TestUtility.rand(8)); touchFile(file2, TestUtility.rand(8)); touchFile(file3, TestUtility.rand(8)); assertThat(directoryStorage().listDir(root, false)) .hasSize(3) .extracting(ObjectInfo::key) .contains(dir1, file1, file2); assertThat(directoryStorage().listDir(root, true)) .hasSize(4) .extracting(ObjectInfo::key) .contains(dir1, file1, file2, file3); } @Test public void testRecursiveListWithSmallBatch() { Configuration conf = new Configuration(directoryStorage().conf()); conf.setInt(TosKeys.FS_TOS_LIST_OBJECTS_COUNT, 5); 
directoryStorage().initialize(conf, directoryStorage().bucket().name()); String root = "root/"; mkdir(root); // Create 2 files start with 'a', 2 sub dirs start with 'b', 2 files start with 'c' for (int i = 1; i <= 2; i++) { touchFile("root/a-file-" + i, TestUtility.rand(8)); mkdir("root/b-dir-" + i + "/"); touchFile("root/c-file-" + i, TestUtility.rand(8)); } // Create two files under each sub dirs. for (int j = 1; j <= 2; j++) { touchFile(String.format("root/b-dir-%d/file1", j), TestUtility.rand(8)); touchFile(String.format("root/b-dir-%d/file2", j), TestUtility.rand(8)); } assertThat(directoryStorage().listDir(root, false)) .hasSize(6) .extracting(ObjectInfo::key) .contains( "root/a-file-1", "root/a-file-2", "root/b-dir-1/", "root/b-dir-2/", "root/c-file-1", "root/c-file-2"); assertThat(directoryStorage().listDir(root, true)) .hasSize(10) .extracting(ObjectInfo::key) .contains( "root/a-file-1", "root/a-file-2", "root/b-dir-1/", "root/b-dir-1/file1", "root/b-dir-1/file2", "root/b-dir-2/", "root/b-dir-2/file1", "root/b-dir-2/file2", "root/c-file-1", "root/c-file-2"); } @Test public void testRecursiveListRoot() { String root = "root/"; String dir1 = "root/dir1/"; mkdir(root); mkdir(dir1); assertThat(directoryStorage().listDir("", true)) .hasSize(2) .extracting(ObjectInfo::key) .contains("root/", "root/dir1/"); } @Test public void testDeleteEmptyDir() { String dir = "a/b/"; mkdir(dir); directoryStorage().deleteDir(dir, false); assertNull(directoryStorage().head(dir)); } @Test public void testDeleteNonEmptyDir() { String dir = "a/b/"; String subDir = "a/b/c/"; String file = "a/b/file.txt"; mkdir(dir); mkdir(subDir); touchFile(file, new byte[10]); assertThrows(RuntimeException.class, () -> directoryStorage().deleteDir(dir, false)); assertNotNull(directoryStorage().head(dir)); assertNotNull(directoryStorage().head(subDir)); assertNotNull(directoryStorage().head(file)); directoryStorage().deleteDir(dir, true); assertNull(directoryStorage().head(dir)); 
assertNull(directoryStorage().head(subDir)); assertNull(directoryStorage().head(file)); } @Test public void testRecursiveDeleteDirViaTosSDK() { Configuration conf = new Configuration(directoryStorage().conf()); conf.setBoolean(TosKeys.FS_TOS_RMR_CLIENT_ENABLE, true); directoryStorage().initialize(conf, directoryStorage().bucket().name()); testDeleteNonEmptyDir(); } // TOS doesn't enable recursive delete in server side currently. @Disabled @Test public void testAtomicDeleteDir() { Configuration conf = new Configuration(directoryStorage().conf()); conf.setBoolean(TosKeys.FS_TOS_RMR_SERVER_ENABLED, true); directoryStorage().initialize(conf, directoryStorage().bucket().name()); testDeleteNonEmptyDir(); } private void touchFile(String key, byte[] data) { directoryStorage().put(key, data); } private void mkdir(String key) { directoryStorage().put(key, new byte[0]); } private DirectoryStorage directoryStorage() { assumeTrue(storage.bucket().isDirectory()); return (DirectoryStorage) storage; } }
TestDirectoryStorage
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/batch/BatchAndClassIdCollectionTest.java
{ "start": 1497, "end": 3420 }
class ____ { @BeforeAll public void setUp(SessionFactoryScope scope) { scope.inTransaction( session -> { for (long i = 1L; i < 11; i++) { Parent parent = new Parent( i ); Child child = new Child( i * 100L + 1L, parent ); Child child2 = new Child( i * 100L + 2L, parent ); Child child3 = new Child( i * 100L + 3L, parent ); Child child4 = new Child( i * 100L + 4L, parent ); Child child5 = new Child( i * 100L + 5L, parent ); Child child6 = new Child( i * 100L + 6L, parent ); Child child7 = new Child( i * 100L + 7L, parent ); Child child8 = new Child( i * 100L + 8L, parent ); Child child9 = new Child( i * 100L + 9L, parent ); Child child10 = new Child( i * 100L + 10L, parent ); Child child11 = new Child( i * 100L + 11L, parent ); session.persist( parent ); } } ); } @Test public void testBatchInitializeChildCollection(SessionFactoryScope scope){ SQLStatementInspector statementInspector = (SQLStatementInspector) scope.getStatementInspector(); scope.inTransaction( session -> { statementInspector.clear(); final List<Parent> list = session.createSelectionQuery( "from Parent", Parent.class ) .getResultList(); list.get( 0 ).getChildren().size(); statementInspector.assertExecutedCount( 2 ); Assertions.assertThat( statementInspector.getSqlQueries().get( 0 ) ).doesNotContain( "?" ); if ( scope.getSessionFactory().getJdbcServices().getDialect().useArrayForMultiValuedParameters() ) { Assertions.assertThat( statementInspector.getSqlQueries().get( 1 ) ).containsOnlyOnce( "?" ); } else { Assertions.assertThat( statementInspector.getSqlQueries().get( 1 ) ).containsOnlyOnce( "in (?,?,?,?,?)" ); } } ); } @Entity(name = "Child") @Table(name = "child_tablle") @IdClass(Child.IdClass.class) public static
BatchAndClassIdCollectionTest
java
google__guava
android/guava-tests/test/com/google/common/base/BenchmarkHelpers.java
{ "start": 799, "end": 1984 }
class ____ { private static final String WHITESPACE_CHARACTERS = "\u00a0\u180e\u202f\t\n\013\f\r \u0085" + "\u1680\u2028\u2029\u205f\u3000\u2000\u2001\u2002\u2003\u2004\u2005" + "\u2006\u2007\u2008\u2009\u200a"; private static final String ASCII_CHARACTERS; static { int spaceInAscii = 32; int sevenBitAsciiMax = 128; StringBuilder sb = new StringBuilder(sevenBitAsciiMax - spaceInAscii); for (int ch = spaceInAscii; ch < sevenBitAsciiMax; ch++) { sb.append((char) ch); } ASCII_CHARACTERS = sb.toString(); } private static final String ALL_DIGITS; static { StringBuilder sb = new StringBuilder(); String zeros = "0\u0660\u06f0\u07c0\u0966\u09e6\u0a66\u0ae6\u0b66\u0be6\u0c66" + "\u0ce6\u0d66\u0e50\u0ed0\u0f20\u1040\u1090\u17e0\u1810\u1946" + "\u19d0\u1b50\u1bb0\u1c40\u1c50\ua620\ua8d0\ua900\uaa50\uff10"; for (char base : zeros.toCharArray()) { for (int offset = 0; offset < 10; offset++) { sb.append((char) (base + offset)); } } ALL_DIGITS = sb.toString(); } /** Sample CharMatcher instances for benchmarking. */ public
BenchmarkHelpers
java
elastic__elasticsearch
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlResponseListenerTests.java
{ "start": 4862, "end": 5293 }
class ____ extends AbstractAppender { public final List<LogEvent> events = new ArrayList<>(); MockAppender(final String name) throws IllegalAccessException { super(name, RegexFilter.createFilter(".*(\n.*)*", new String[0], false, null, null), null, false); } @Override public void append(LogEvent event) { events.add(event.toImmutable()); } } }
MockAppender
java
netty__netty
codec-http/src/main/java/io/netty/handler/codec/rtsp/RtspEncoder.java
{ "start": 1277, "end": 2820 }
class ____ extends HttpObjectEncoder<HttpMessage> { private static final int CRLF_SHORT = (CR << 8) | LF; @Override public boolean acceptOutboundMessage(final Object msg) throws Exception { return super.acceptOutboundMessage(msg) && ((msg instanceof HttpRequest) || (msg instanceof HttpResponse)); } @Override protected void encodeInitialLine(final ByteBuf buf, final HttpMessage message) throws Exception { if (message instanceof HttpRequest) { HttpRequest request = (HttpRequest) message; ByteBufUtil.copy(request.method().asciiName(), buf); buf.writeByte(SP); buf.writeCharSequence(request.uri(), CharsetUtil.UTF_8); buf.writeByte(SP); buf.writeCharSequence(request.protocolVersion().toString(), CharsetUtil.US_ASCII); ByteBufUtil.writeShortBE(buf, CRLF_SHORT); } else if (message instanceof HttpResponse) { HttpResponse response = (HttpResponse) message; buf.writeCharSequence(response.protocolVersion().toString(), CharsetUtil.US_ASCII); buf.writeByte(SP); ByteBufUtil.copy(response.status().codeAsText(), buf); buf.writeByte(SP); buf.writeCharSequence(response.status().reasonPhrase(), CharsetUtil.US_ASCII); ByteBufUtil.writeShortBE(buf, CRLF_SHORT); } else { throw new UnsupportedMessageTypeException(message, HttpRequest.class, HttpResponse.class); } } }
RtspEncoder
java
quarkusio__quarkus
extensions/undertow/deployment/src/test/java/io/quarkus/undertow/test/NoUrlPatternWebFilter.java
{ "start": 387, "end": 650 }
class ____ implements Filter { @Override public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException { response.getWriter().write("Goodbye"); } }
NoUrlPatternWebFilter
java
FasterXML__jackson-databind
src/main/java/tools/jackson/databind/deser/impl/FailingDeserializer.java
{ "start": 510, "end": 1007 }
class ____ extends StdDeserializer<Object> { protected final String _message; public FailingDeserializer(String m) { this(Object.class, m); } public FailingDeserializer(Class<?> rawType, String m) { super(rawType); _message = m; } @Override public Object deserialize(JsonParser p, DeserializationContext ctxt) throws JacksonException { ctxt.reportInputMismatch(this, _message); return null; } }
FailingDeserializer
java
junit-team__junit5
platform-tests/src/test/java/org/junit/platform/suite/engine/testsuites/NestedSuite.java
{ "start": 712, "end": 793 }
class ____ { } @Suite @SelectClasses(MultipleTestsTestCase.class) static
Jupiter
java
apache__flink
flink-runtime/src/main/java/org/apache/flink/runtime/dispatcher/cleanup/ResourceCleanerFactory.java
{ "start": 1218, "end": 2140 }
interface ____ { /** * Creates {@link ResourceCleaner} that initiates {@link * LocallyCleanableResource#localCleanupAsync(JobID, Executor)} calls. * * @param mainThreadExecutor Used for validating that the {@link * LocallyCleanableResource#localCleanupAsync(JobID, Executor)} is called from the main * thread. */ ResourceCleaner createLocalResourceCleaner(ComponentMainThreadExecutor mainThreadExecutor); /** * Creates {@link ResourceCleaner} that initiates {@link * GloballyCleanableResource#globalCleanupAsync(JobID, Executor)} calls. * * @param mainThreadExecutor Used for validating that the {@link * GloballyCleanableResource#globalCleanupAsync(JobID, Executor)} is called from the main * thread. */ ResourceCleaner createGlobalResourceCleaner(ComponentMainThreadExecutor mainThreadExecutor); }
ResourceCleanerFactory
java
spring-projects__spring-boot
core/spring-boot/src/main/java/org/springframework/boot/ssl/FixedTrustManagerFactory.java
{ "start": 1562, "end": 2038 }
class ____ extends TrustManagerFactorySpi { private final TrustManager[] trustManagers; private FixedTrustManagersSpi(TrustManager[] trustManagers) { this.trustManagers = trustManagers; } @Override protected void engineInit(KeyStore ks) { } @Override protected void engineInit(ManagerFactoryParameters spec) { } @Override protected TrustManager[] engineGetTrustManagers() { return this.trustManagers; } } private static
FixedTrustManagersSpi
java
elastic__elasticsearch
x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoShapeValues.java
{ "start": 2363, "end": 4173 }
class ____ extends ShapeValues.ShapeValue { private final Tile2DVisitor tile2DVisitor; // This does not work for cartesian, so we currently only support this in geo public GeoShapeValue() { super(CoordinateEncoder.GEO, (x, y) -> new GeoPoint(y, x)); this.tile2DVisitor = new Tile2DVisitor(); } @SuppressWarnings("this-escape") public GeoShapeValue(StreamInput in) throws IOException { this(); reset(in); } @Override protected Component2D centroidAsComponent2D() throws IOException { return LatLonGeometry.create(new Point(getY(), getX())); } /** * Determine the {@link GeoRelation} between the current shape and a bounding box provided in * the encoded space. This does not work for cartesian, so we currently only support this in geo. */ public GeoRelation relate(int minX, int maxX, int minY, int maxY) throws IOException { tile2DVisitor.reset(minX, minY, maxX, maxY); visit(tile2DVisitor); return tile2DVisitor.relation(); } /** * Determine the {@link GeoRelation} between the current shape and a {@link LatLonGeometry}. It only supports * simple geometries, therefore it will fail if the LatLonGeometry is a {@link org.apache.lucene.geo.Rectangle} * that crosses the dateline. * TODO: this is a test only method, perhaps should be moved to test code */ public GeoRelation relate(LatLonGeometry geometry) throws IOException { return relate(LatLonGeometry.create(geometry)); } @Override public String getWriteableName() { return "GeoShapeValue"; } } }
GeoShapeValue
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/federation/store/sql/TestFederationSQLServerScriptAccuracy.java
{ "start": 1113, "end": 2124 }
class ____ extends FederationSQLAccuracyTest { private static final Logger LOG = LoggerFactory.getLogger(TestFederationSQLServerScriptAccuracy.class); private static final String SQLSERVER_COMPATIBILITY = ";sql.syntax_mss=true"; @Override protected SQLServerFederationStateStore createStateStore() { return new SQLServerFederationStateStore(); } @Override protected String getSQLURL() { return DATABASE_URL + System.currentTimeMillis() + SQLSERVER_COMPATIBILITY; } @Test public void checkSqlServerScriptAccuracy() throws SQLException { SQLServerFederationStateStore federationStateStore = this.createStateStore(); federationStateStore.init(getConf()); // get a list of tables List<String> tables = federationStateStore.getTables(); for (String table : tables) { federationStateStore.getConn().prepareStatement(table).execute(); } LOG.info("FederationStateStore create {} tables.", tables.size()); } }
TestFederationSQLServerScriptAccuracy
java
mapstruct__mapstruct
processor/src/test/java/org/mapstruct/ap/test/dependency/PersonDto.java
{ "start": 198, "end": 955 }
class ____ { private String firstName; private String middleName; private String lastName; private String fullName; public String getFirstName() { return firstName; } public void setFirstName(String firstName) { this.firstName = firstName; } public String getMiddleName() { return middleName; } public void setMiddleName(String middleName) { this.middleName = middleName; } public String getLastName() { return lastName; } public void setLastName(String lastName) { this.lastName = lastName; this.fullName = firstName + " " + middleName + " " + lastName; } public String getFullName() { return fullName; } }
PersonDto
java
apache__flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/inference/transforms/LegacyRawTypeTransformation.java
{ "start": 1515, "end": 2301 }
class ____ implements TypeTransformation { public static final TypeTransformation INSTANCE = new LegacyRawTypeTransformation(); @Override public DataType transform(DataType typeToTransform) { LogicalType logicalType = typeToTransform.getLogicalType(); if (logicalType instanceof LegacyTypeInformationType && logicalType.getTypeRoot() == LogicalTypeRoot.RAW) { TypeInformation<?> typeInfo = ((LegacyTypeInformationType<?>) logicalType).getTypeInformation(); DataType rawDataType = new AtomicDataType(new TypeInformationRawType<>(typeInfo)); return logicalType.isNullable() ? rawDataType : rawDataType.notNull(); } return typeToTransform; } }
LegacyRawTypeTransformation
java
elastic__elasticsearch
server/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java
{ "start": 1079, "end": 13615 }
class ____ extends ESTestCase { public void testFromString() { assertSame(Fuzziness.AUTO, Fuzziness.fromString("AUTO")); assertSame(Fuzziness.AUTO, Fuzziness.fromString("auto")); assertSame(Fuzziness.ZERO, Fuzziness.fromString("0")); assertSame(Fuzziness.ZERO, Fuzziness.fromString("0.0")); assertSame(Fuzziness.ONE, Fuzziness.fromString("1")); assertSame(Fuzziness.ONE, Fuzziness.fromString("1.0")); assertSame(Fuzziness.TWO, Fuzziness.fromString("2")); assertSame(Fuzziness.TWO, Fuzziness.fromString("2.0")); // cases that should throw exceptions IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> Fuzziness.fromString(null)); assertEquals("fuzziness cannot be null or empty.", ex.getMessage()); ex = expectThrows(IllegalArgumentException.class, () -> Fuzziness.fromString("")); assertEquals("fuzziness cannot be null or empty.", ex.getMessage()); ex = expectThrows(IllegalArgumentException.class, () -> Fuzziness.fromString("foo")); assertEquals("fuzziness cannot be [foo].", ex.getMessage()); ex = expectThrows(IllegalArgumentException.class, () -> Fuzziness.fromString("1.2")); assertEquals("fuzziness needs to be one of 0.0, 1.0 or 2.0 but was 1.2", ex.getMessage()); } public void testNumericConstants() { assertSame(Fuzziness.ZERO, Fuzziness.fromEdits(0)); assertSame(Fuzziness.ZERO, Fuzziness.fromString("0")); assertSame(Fuzziness.ZERO, Fuzziness.fromString("0.0")); assertThat(Fuzziness.ZERO.asString(), equalTo("0")); assertThat(Fuzziness.ZERO.asDistance(), equalTo(0)); assertThat(Fuzziness.ZERO.asDistance(randomAlphaOfLengthBetween(0, randomIntBetween(1, 500))), equalTo(0)); assertThat(Fuzziness.ZERO.asFloat(), equalTo(0.0f)); assertSame(Fuzziness.ONE, Fuzziness.fromEdits(1)); assertSame(Fuzziness.ONE, Fuzziness.fromString("1")); assertSame(Fuzziness.ONE, Fuzziness.fromString("1.0")); assertThat(Fuzziness.ONE.asString(), equalTo("1")); assertThat(Fuzziness.ONE.asDistance(), equalTo(1)); 
assertThat(Fuzziness.ONE.asDistance(randomAlphaOfLengthBetween(0, randomIntBetween(1, 500))), equalTo(1)); assertThat(Fuzziness.ONE.asFloat(), equalTo(1.0f)); assertSame(Fuzziness.TWO, Fuzziness.fromEdits(2)); assertSame(Fuzziness.TWO, Fuzziness.fromString("2")); assertSame(Fuzziness.TWO, Fuzziness.fromString("2.0")); assertThat(Fuzziness.TWO.asString(), equalTo("2")); assertThat(Fuzziness.TWO.asDistance(), equalTo(2)); assertThat(Fuzziness.TWO.asDistance(randomAlphaOfLengthBetween(0, randomIntBetween(1, 500))), equalTo(2)); assertThat(Fuzziness.TWO.asFloat(), equalTo(2.0f)); } public void testAutoFuzziness() { assertSame(Fuzziness.AUTO, Fuzziness.fromString("auto")); assertSame(Fuzziness.AUTO, Fuzziness.fromString("AUTO")); assertThat(Fuzziness.AUTO.asString(), equalTo("AUTO")); assertThat(Fuzziness.AUTO.asDistance(), equalTo(1)); assertThat(Fuzziness.AUTO.asDistance(randomAlphaOfLengthBetween(0, 2)), equalTo(0)); assertThat(Fuzziness.AUTO.asDistance(randomAlphaOfLengthBetween(3, 5)), equalTo(1)); assertThat(Fuzziness.AUTO.asDistance(randomAlphaOfLengthBetween(6, 100)), equalTo(2)); assertThat(Fuzziness.AUTO.asFloat(), equalTo(1.0f)); } public void testCustomAutoFuzziness() { int lowDistance = randomIntBetween(1, 10); int highDistance = randomIntBetween(lowDistance, 20); String auto = randomFrom("auto", "AUTO"); Fuzziness fuzziness = Fuzziness.fromString(auto + ":" + lowDistance + "," + highDistance); assertNotSame(Fuzziness.AUTO, fuzziness); if (lowDistance != Fuzziness.DEFAULT_LOW_DISTANCE || highDistance != Fuzziness.DEFAULT_HIGH_DISTANCE) { assertThat(fuzziness.asString(), equalTo("AUTO:" + lowDistance + "," + highDistance)); } if (lowDistance > 5) { assertThat(fuzziness.asDistance(), equalTo(0)); } else if (highDistance > 5) { assertThat(fuzziness.asDistance(), equalTo(1)); } else { assertThat(fuzziness.asDistance(), equalTo(2)); } assertThat(fuzziness.asDistance(randomAlphaOfLengthBetween(0, lowDistance - 1)), equalTo(0)); if (lowDistance != highDistance) { 
assertThat(fuzziness.asDistance(randomAlphaOfLengthBetween(lowDistance, highDistance - 1)), equalTo(1)); } assertThat(fuzziness.asDistance(randomAlphaOfLengthBetween(highDistance, 100)), equalTo(2)); assertThat(fuzziness.asFloat(), equalTo(1.0f)); } public void testFromEditsIllegalArgs() { int illegalValue = randomValueOtherThanMany(i -> i >= 0 && i <= 2, () -> randomInt()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> Fuzziness.fromEdits(illegalValue)); assertThat(e.getMessage(), equalTo("Valid edit distances are [0, 1, 2] but was [" + illegalValue + "]")); } public void testFromStringIllegalArgs() { Exception e = expectThrows(IllegalArgumentException.class, () -> Fuzziness.fromString(null)); assertThat(e.getMessage(), equalTo("fuzziness cannot be null or empty.")); e = expectThrows(IllegalArgumentException.class, () -> Fuzziness.fromString("")); assertThat(e.getMessage(), equalTo("fuzziness cannot be null or empty.")); e = expectThrows(IllegalArgumentException.class, () -> Fuzziness.fromString("illegal")); assertThat(e.getMessage(), equalTo("fuzziness cannot be [illegal].")); e = expectThrows(ElasticsearchParseException.class, () -> Fuzziness.fromString("AUTO:badFormat")); assertThat(e.getMessage(), equalTo("failed to find low and high distance values")); } public void testParseFromXContent() throws IOException { final int iters = randomIntBetween(10, 50); for (int i = 0; i < iters; i++) { { float floatValue = randomFrom(0.0f, 1.0f, 2.0f); XContentBuilder json = jsonBuilder().startObject() .field(Fuzziness.X_FIELD_NAME, randomBoolean() ? 
String.valueOf(floatValue) : floatValue) .endObject(); try (XContentParser parser = createParser(json)) { assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); assertThat( parser.nextToken(), anyOf(equalTo(XContentParser.Token.VALUE_NUMBER), equalTo(XContentParser.Token.VALUE_STRING)) ); Fuzziness fuzziness = Fuzziness.parse(parser); assertThat(fuzziness.asFloat(), equalTo(floatValue)); assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT)); } } { int intValue = randomIntBetween(0, 2); XContentBuilder json = jsonBuilder().startObject() .field(Fuzziness.X_FIELD_NAME, randomBoolean() ? String.valueOf(intValue) : intValue) .endObject(); try (XContentParser parser = createParser(json)) { assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); assertThat( parser.nextToken(), anyOf(equalTo(XContentParser.Token.VALUE_NUMBER), equalTo(XContentParser.Token.VALUE_STRING)) ); Fuzziness fuzziness = Fuzziness.parse(parser); assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT)); switch (intValue) { case 1 -> assertThat(fuzziness, sameInstance(Fuzziness.ONE)); case 2 -> assertThat(fuzziness, sameInstance(Fuzziness.TWO)); case 0 -> assertThat(fuzziness, sameInstance(Fuzziness.ZERO)); } } } { XContentBuilder json; boolean isDefaultAutoFuzzinessTested = randomBoolean(); Fuzziness expectedFuzziness = Fuzziness.AUTO; if (isDefaultAutoFuzzinessTested) { json = Fuzziness.AUTO.toXContent(jsonBuilder().startObject(), null).endObject(); } else { StringBuilder auto = new StringBuilder(); auto = randomBoolean() ? 
auto.append("AUTO") : auto.append("auto"); if (randomBoolean()) { int lowDistance = randomIntBetween(1, 3); int highDistance = randomIntBetween(4, 10); auto.append(":").append(lowDistance).append(",").append(highDistance); expectedFuzziness = Fuzziness.fromString(auto.toString()); } json = expectedFuzziness.toXContent(jsonBuilder().startObject(), null).endObject(); } try (XContentParser parser = createParser(json)) { assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); assertThat(parser.nextToken(), equalTo(XContentParser.Token.VALUE_STRING)); Fuzziness fuzziness = Fuzziness.parse(parser); if (isDefaultAutoFuzzinessTested) { assertThat(fuzziness, sameInstance(expectedFuzziness)); } else { assertEquals(expectedFuzziness, fuzziness); } assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT)); } } } } public void testSerialization() throws IOException { Fuzziness fuzziness = Fuzziness.AUTO; Fuzziness deserializedFuzziness = doSerializeRoundtrip(fuzziness); assertEquals(fuzziness, deserializedFuzziness); fuzziness = Fuzziness.fromEdits(randomIntBetween(0, 2)); deserializedFuzziness = doSerializeRoundtrip(fuzziness); assertEquals(fuzziness, deserializedFuzziness); // custom AUTO int lowDistance = randomIntBetween(1, 10); int highDistance = randomIntBetween(lowDistance, 20); fuzziness = Fuzziness.fromString("AUTO:" + lowDistance + "," + highDistance); deserializedFuzziness = doSerializeRoundtrip(fuzziness); assertNotSame(fuzziness, deserializedFuzziness); assertEquals(fuzziness, deserializedFuzziness); assertEquals(fuzziness.asString(), deserializedFuzziness.asString()); } private static Fuzziness doSerializeRoundtrip(Fuzziness in) throws IOException { BytesStreamOutput output = new BytesStreamOutput(); in.writeTo(output); StreamInput streamInput = output.bytes().streamInput(); return new Fuzziness(streamInput); } public void testAsDistanceString() { Fuzziness 
fuzziness = Fuzziness.fromEdits(0); assertEquals(0, fuzziness.asDistance(randomAlphaOfLengthBetween(0, 10))); fuzziness = Fuzziness.fromEdits(1); assertEquals(1, fuzziness.asDistance(randomAlphaOfLengthBetween(0, 10))); fuzziness = Fuzziness.fromEdits(2); assertEquals(2, fuzziness.asDistance(randomAlphaOfLengthBetween(0, 10))); fuzziness = Fuzziness.fromString("AUTO"); assertEquals(0, fuzziness.asDistance("")); assertEquals(0, fuzziness.asDistance("ab")); assertEquals(1, fuzziness.asDistance("abc")); assertEquals(1, fuzziness.asDistance("abcde")); assertEquals(2, fuzziness.asDistance("abcdef")); fuzziness = Fuzziness.fromString("AUTO:5,7"); assertEquals(0, fuzziness.asDistance("")); assertEquals(0, fuzziness.asDistance("abcd")); assertEquals(1, fuzziness.asDistance("abcde")); assertEquals(1, fuzziness.asDistance("abcdef")); assertEquals(2, fuzziness.asDistance("abcdefg")); } }
FuzzinessTests
java
elastic__elasticsearch
modules/lang-painless/src/test/java/org/elasticsearch/painless/BaseClassTests.java
{ "start": 6253, "end": 6978 }
interface ____ { OneArg newInstance(); } public static final ScriptContext<Factory> CONTEXT = new ScriptContext<>("onearg", Factory.class); public static final String[] PARAMETERS = new String[] { "arg" }; public abstract Object execute(Object arg); } public void testOneArg() throws Exception { Object rando = randomInt(); assertEquals(rando, scriptEngine.compile("testOneArg0", "arg", OneArg.CONTEXT, emptyMap()).newInstance().execute(rando)); rando = randomAlphaOfLength(5); assertEquals(rando, scriptEngine.compile("testOneArg1", "arg", OneArg.CONTEXT, emptyMap()).newInstance().execute(rando)); } public abstract static
Factory
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/type/descriptor/java/TimeZoneJavaType.java
{ "start": 493, "end": 1869 }
class ____ implements Comparator<TimeZone> { public static final TimeZoneComparator INSTANCE = new TimeZoneComparator(); public int compare(TimeZone o1, TimeZone o2) { return o1.getID().compareTo( o2.getID() ); } } public TimeZoneJavaType() { super( TimeZone.class, ImmutableMutabilityPlan.instance(), TimeZoneComparator.INSTANCE ); } @Override public boolean isInstance(Object value) { return value instanceof TimeZone; } @Override public boolean useObjectEqualsHashCode() { return true; } public String toString(TimeZone value) { return value.getID(); } public TimeZone fromString(CharSequence string) { return TimeZone.getTimeZone( string.toString() ); } @SuppressWarnings("unchecked") public <X> X unwrap(TimeZone value, Class<X> type, WrapperOptions options) { if ( value == null ) { return null; } if ( TimeZone.class.isAssignableFrom( type ) ) { return (X) value; } if ( String.class.isAssignableFrom( type ) ) { return (X) toString( value ); } throw unknownUnwrap( type ); } public <X> TimeZone wrap(X value, WrapperOptions options) { if ( value == null ) { return null; } if ( value instanceof TimeZone ) { return (TimeZone) value; } if ( value instanceof CharSequence ) { return fromString( (CharSequence) value ); } throw unknownWrap( value.getClass() ); } }
TimeZoneComparator
java
spring-projects__spring-boot
module/spring-boot-micrometer-tracing-opentelemetry/src/main/java/org/springframework/boot/micrometer/tracing/opentelemetry/autoconfigure/OpenTelemetryEventPublisherBeansTestExecutionListener.java
{ "start": 1096, "end": 1340 }
class ____ implements TestExecutionListener { @Override public void executionStarted(TestIdentifier testIdentifier) { OpenTelemetryEventPublisherBeansApplicationListener.addWrapper(); } }
OpenTelemetryEventPublisherBeansTestExecutionListener
java
apache__camel
components/camel-kubernetes/src/generated/java/org/apache/camel/component/kubernetes/cloud/KubernetesServiceDiscoveryFactoryConfigurer.java
{ "start": 755, "end": 7719 }
class ____ extends org.apache.camel.support.component.PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter { @Override public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) { org.apache.camel.component.kubernetes.cloud.KubernetesServiceDiscoveryFactory target = (org.apache.camel.component.kubernetes.cloud.KubernetesServiceDiscoveryFactory) obj; switch (ignoreCase ? name.toLowerCase() : name) { case "apiversion": case "apiVersion": target.setApiVersion(property(camelContext, java.lang.String.class, value)); return true; case "cacertdata": case "caCertData": target.setCaCertData(property(camelContext, java.lang.String.class, value)); return true; case "cacertfile": case "caCertFile": target.setCaCertFile(property(camelContext, java.lang.String.class, value)); return true; case "clientcertdata": case "clientCertData": target.setClientCertData(property(camelContext, java.lang.String.class, value)); return true; case "clientcertfile": case "clientCertFile": target.setClientCertFile(property(camelContext, java.lang.String.class, value)); return true; case "clientkeyalgo": case "clientKeyAlgo": target.setClientKeyAlgo(property(camelContext, java.lang.String.class, value)); return true; case "clientkeydata": case "clientKeyData": target.setClientKeyData(property(camelContext, java.lang.String.class, value)); return true; case "clientkeyfile": case "clientKeyFile": target.setClientKeyFile(property(camelContext, java.lang.String.class, value)); return true; case "clientkeypassphrase": case "clientKeyPassphrase": target.setClientKeyPassphrase(property(camelContext, java.lang.String.class, value)); return true; case "dnsdomain": case "dnsDomain": target.setDnsDomain(property(camelContext, java.lang.String.class, value)); return true; case "lookup": target.setLookup(property(camelContext, java.lang.String.class, value)); return true; case "masterurl": case "masterUrl": 
target.setMasterUrl(property(camelContext, java.lang.String.class, value)); return true; case "namespace": target.setNamespace(property(camelContext, java.lang.String.class, value)); return true; case "oauthtoken": case "oauthToken": target.setOauthToken(property(camelContext, java.lang.String.class, value)); return true; case "password": target.setPassword(property(camelContext, java.lang.String.class, value)); return true; case "portname": case "portName": target.setPortName(property(camelContext, java.lang.String.class, value)); return true; case "portprotocol": case "portProtocol": target.setPortProtocol(property(camelContext, java.lang.String.class, value)); return true; case "trustcerts": case "trustCerts": target.setTrustCerts(property(camelContext, java.lang.Boolean.class, value)); return true; case "username": target.setUsername(property(camelContext, java.lang.String.class, value)); return true; default: return false; } } @Override public Class<?> getOptionType(String name, boolean ignoreCase) { switch (ignoreCase ? 
name.toLowerCase() : name) { case "apiversion": case "apiVersion": return java.lang.String.class; case "cacertdata": case "caCertData": return java.lang.String.class; case "cacertfile": case "caCertFile": return java.lang.String.class; case "clientcertdata": case "clientCertData": return java.lang.String.class; case "clientcertfile": case "clientCertFile": return java.lang.String.class; case "clientkeyalgo": case "clientKeyAlgo": return java.lang.String.class; case "clientkeydata": case "clientKeyData": return java.lang.String.class; case "clientkeyfile": case "clientKeyFile": return java.lang.String.class; case "clientkeypassphrase": case "clientKeyPassphrase": return java.lang.String.class; case "dnsdomain": case "dnsDomain": return java.lang.String.class; case "lookup": return java.lang.String.class; case "masterurl": case "masterUrl": return java.lang.String.class; case "namespace": return java.lang.String.class; case "oauthtoken": case "oauthToken": return java.lang.String.class; case "password": return java.lang.String.class; case "portname": case "portName": return java.lang.String.class; case "portprotocol": case "portProtocol": return java.lang.String.class; case "trustcerts": case "trustCerts": return java.lang.Boolean.class; case "username": return java.lang.String.class; default: return null; } } @Override public Object getOptionValue(Object obj, String name, boolean ignoreCase) { org.apache.camel.component.kubernetes.cloud.KubernetesServiceDiscoveryFactory target = (org.apache.camel.component.kubernetes.cloud.KubernetesServiceDiscoveryFactory) obj; switch (ignoreCase ? 
name.toLowerCase() : name) { case "apiversion": case "apiVersion": return target.getApiVersion(); case "cacertdata": case "caCertData": return target.getCaCertData(); case "cacertfile": case "caCertFile": return target.getCaCertFile(); case "clientcertdata": case "clientCertData": return target.getClientCertData(); case "clientcertfile": case "clientCertFile": return target.getClientCertFile(); case "clientkeyalgo": case "clientKeyAlgo": return target.getClientKeyAlgo(); case "clientkeydata": case "clientKeyData": return target.getClientKeyData(); case "clientkeyfile": case "clientKeyFile": return target.getClientKeyFile(); case "clientkeypassphrase": case "clientKeyPassphrase": return target.getClientKeyPassphrase(); case "dnsdomain": case "dnsDomain": return target.getDnsDomain(); case "lookup": return target.getLookup(); case "masterurl": case "masterUrl": return target.getMasterUrl(); case "namespace": return target.getNamespace(); case "oauthtoken": case "oauthToken": return target.getOauthToken(); case "password": return target.getPassword(); case "portname": case "portName": return target.getPortName(); case "portprotocol": case "portProtocol": return target.getPortProtocol(); case "trustcerts": case "trustCerts": return target.getTrustCerts(); case "username": return target.getUsername(); default: return null; } } }
KubernetesServiceDiscoveryFactoryConfigurer
java
google__auto
value/src/test/java/com/google/auto/value/processor/TemplateVarsTest.java
{ "start": 3432, "end": 3805 }
class ____ extends TemplateVars { Integer integer; static String string; @Override Template parsedTemplate() { throw new UnsupportedOperationException(); } } @Test public void testStatic() { try { new Static(); fail("Did not get expected exception"); } catch (IllegalArgumentException expected) { } } static
Static
java
quarkusio__quarkus
core/deployment/src/main/java/io/quarkus/deployment/dev/RuntimeUpdatesProcessor.java
{ "start": 2838, "end": 5072 }
class ____ implements HotReplacementContext, Closeable { public static final boolean IS_LINUX = System.getProperty("os.name").toLowerCase(Locale.ENGLISH).contains("linux"); private static final Logger log = Logger.getLogger(RuntimeUpdatesProcessor.class); private static final String CLASS_EXTENSION = ".class"; public static volatile RuntimeUpdatesProcessor INSTANCE; private final Path applicationRoot; private final DevModeContext context; private final QuarkusCompiler compiler; private final DevModeType devModeType; volatile Throwable compileProblem; volatile Throwable testCompileProblem; volatile Throwable hotReloadProblem; private final AtomicReference<Throwable> deploymentProblem; private volatile Predicate<ClassInfo> disableInstrumentationForClassPredicate = new AlwaysFalsePredicate<>(); private volatile Predicate<Index> disableInstrumentationForIndexPredicate = new AlwaysFalsePredicate<>(); private static volatile boolean instrumentationLogPrinted = false; /** * dev mode replacement and test running track their changes separately */ private final TimestampSet main = new TimestampSet(); private final TimestampSet test = new TimestampSet(); final Map<Path, Long> sourceFileTimestamps = new ConcurrentHashMap<>(); private final List<Runnable> preScanSteps = new CopyOnWriteArrayList<>(); private final List<Runnable> postRestartSteps = new CopyOnWriteArrayList<>(); private final List<Consumer<Set<String>>> noRestartChangesConsumers = new CopyOnWriteArrayList<>(); private final List<HotReplacementSetup> hotReplacementSetup = new ArrayList<>(); private final List<Runnable> deploymentFailedStartHandlers = new ArrayList<>(); private final BiConsumer<Set<String>, ClassScanResult> restartCallback; private final BiConsumer<DevModeContext.ModuleInfo, String> copyResourceNotification; private final BiFunction<String, byte[], byte[]> classTransformers; private final ReentrantLock scanLock = new ReentrantLock(); private final Lock codeGenLock = CodeGenLock.lockForCompilation(); 
/** * The index for the last successful start. Used to determine if the
RuntimeUpdatesProcessor
java
apache__hadoop
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java
{ "start": 17397, "end": 29085 }
class ____ { private final String src; private final String target; private final LinkType linkType; private final String settings; private final UserGroupInformation ugi; private final Configuration config; LinkEntry(String src, String target, LinkType linkType, String settings, UserGroupInformation ugi, Configuration config) { this.src = src; this.target = target; this.linkType = linkType; this.settings = settings; this.ugi = ugi; this.config = config; } String getSrc() { return src; } String getTarget() { return target; } LinkType getLinkType() { return linkType; } boolean isLinkType(LinkType type) { return this.linkType == type; } String getSettings() { return settings; } UserGroupInformation getUgi() { return ugi; } Configuration getConfig() { return config; } } /** * Create Inode Tree from the specified mount-table specified in Config. * * @param config the mount table keys are prefixed with * FsConstants.CONFIG_VIEWFS_PREFIX. * @param viewName the name of the mount table * if null use defaultMT name. * @param theUri heUri. * @param initingUriAsFallbackOnNoMounts initingUriAsFallbackOnNoMounts. * @throws UnsupportedFileSystemException file system for <code>uri</code> is * not found. * @throws URISyntaxException if the URI does not have an authority * it is badly formed. * @throws FileAlreadyExistsException there is a file at the path specified * or is discovered on one of its ancestors. * @throws IOException raised on errors performing I/O. 
*/ protected InodeTree(final Configuration config, final String viewName, final URI theUri, boolean initingUriAsFallbackOnNoMounts) throws UnsupportedFileSystemException, URISyntaxException, FileAlreadyExistsException, IOException { String mountTableName = viewName; if (mountTableName == null) { mountTableName = ConfigUtil.getDefaultMountTableName(config); } homedirPrefix = ConfigUtil.getHomeDirValue(config, mountTableName); isNestedMountPointSupported = ConfigUtil.isNestedMountPointSupported(config); boolean isMergeSlashConfigured = false; String mergeSlashTarget = null; List<LinkEntry> linkEntries = new LinkedList<>(); final String mountTablePrefix = Constants.CONFIG_VIEWFS_PREFIX + "." + mountTableName + "."; final String linkPrefix = Constants.CONFIG_VIEWFS_LINK + "."; final String linkFallbackPrefix = Constants.CONFIG_VIEWFS_LINK_FALLBACK; final String linkMergePrefix = Constants.CONFIG_VIEWFS_LINK_MERGE + "."; final String linkMergeSlashPrefix = Constants.CONFIG_VIEWFS_LINK_MERGE_SLASH; boolean gotMountTableEntry = false; final UserGroupInformation ugi = UserGroupInformation.getCurrentUser(); for (Entry<String, String> si : config) { final String key = si.getKey(); if (!key.startsWith(mountTablePrefix)) { continue; } gotMountTableEntry = true; LinkType linkType; String src = key.substring(mountTablePrefix.length()); String settings = null; if (src.startsWith(linkPrefix)) { src = src.substring(linkPrefix.length()); if (src.equals(SlashPath.toString())) { throw new UnsupportedFileSystemException("Unexpected mount table " + "link entry '" + key + "'. 
Use " + Constants.CONFIG_VIEWFS_LINK_MERGE_SLASH + " instead!"); } linkType = LinkType.SINGLE; } else if (src.startsWith(linkFallbackPrefix)) { checkMntEntryKeyEqualsTarget(src, linkFallbackPrefix); linkType = LinkType.SINGLE_FALLBACK; } else if (src.startsWith(linkMergePrefix)) { // A merge link src = src.substring(linkMergePrefix.length()); linkType = LinkType.MERGE; } else if (src.startsWith(linkMergeSlashPrefix)) { // This is a LinkMergeSlash entry. This entry should // not have any additional source path. checkMntEntryKeyEqualsTarget(src, linkMergeSlashPrefix); linkType = LinkType.MERGE_SLASH; } else if (src.startsWith(Constants.CONFIG_VIEWFS_LINK_NFLY)) { // prefix.settings.src src = src.substring(Constants.CONFIG_VIEWFS_LINK_NFLY.length() + 1); // settings.src settings = src.substring(0, src.indexOf('.')); // settings // settings.src src = src.substring(settings.length() + 1); // src linkType = LinkType.NFLY; } else if (src.startsWith(Constants.CONFIG_VIEWFS_LINK_REGEX)) { linkEntries.add( buildLinkRegexEntry(config, ugi, src, si.getValue())); continue; } else if (src.startsWith(Constants.CONFIG_VIEWFS_HOMEDIR)) { // ignore - we set home dir from config continue; } else { throw new IOException("ViewFs: Cannot initialize: Invalid entry in " + "Mount table in config: " + src); } final String target = si.getValue(); if (linkType != LinkType.MERGE_SLASH) { if (isMergeSlashConfigured) { throw new IOException("Mount table " + mountTableName + " has already been configured with a merge slash link. " + "A regular link should not be added."); } linkEntries.add( new LinkEntry(src, target, linkType, settings, ugi, config)); } else { if (!linkEntries.isEmpty()) { throw new IOException("Mount table " + mountTableName + " has already been configured with regular links. " + "A merge slash link should not be configured."); } if (isMergeSlashConfigured) { throw new IOException("Mount table " + mountTableName + " has already been configured with a merge slash link. 
" + "Multiple merge slash links for the same mount table is " + "not allowed."); } isMergeSlashConfigured = true; mergeSlashTarget = target; } } // End of for loop. if (isMergeSlashConfigured) { Preconditions.checkNotNull(mergeSlashTarget); root = new INodeLink<T>(mountTableName, ugi, initAndGetTargetFs(), mergeSlashTarget); mountPoints.add(new MountPoint<T>("/", (INodeLink<T>) root)); rootFallbackLink = null; } else { root = new INodeDir<T>("/", UserGroupInformation.getCurrentUser()); getRootDir().setInternalDirFs(getTargetFileSystem(getRootDir())); getRootDir().setRoot(true); INodeLink<T> fallbackLink = null; for (LinkEntry le : getLinkEntries(linkEntries)) { switch (le.getLinkType()) { case SINGLE_FALLBACK: if (fallbackLink != null) { throw new IOException("Mount table " + mountTableName + " has already been configured with a link fallback. " + "Multiple fallback links for the same mount table is " + "not allowed."); } fallbackLink = new INodeLink<T>(mountTableName, ugi, initAndGetTargetFs(), le.getTarget()); continue; case REGEX: addRegexMountEntry(le); continue; default: createLink(le.getSrc(), le.getTarget(), le.getLinkType(), le.getSettings(), le.getUgi(), le.getConfig()); } } rootFallbackLink = fallbackLink; getRootDir().addFallbackLink(rootFallbackLink); } if (!gotMountTableEntry) { if (!initingUriAsFallbackOnNoMounts) { throw new IOException(new StringBuilder( "ViewFs: Cannot initialize: Empty Mount table in config for ") .append(theUri.getScheme()).append("://").append(mountTableName) .append("/").toString()); } FileSystem.LOG .info("Empty mount table detected for {} and considering itself " + "as a linkFallback.", theUri); rootFallbackLink = new INodeLink<T>(mountTableName, ugi, initAndGetTargetFs(), theUri.toString()); getRootDir().addFallbackLink(rootFallbackLink); } } /** * Get collection of linkEntry. Sort mount point based on alphabetical order of the src paths. 
* The purpose is to group nested paths(shortest path always comes first) during INodeTree creation. * E.g. /foo is nested with /foo/bar so an INodeDirLink will be created at /foo. * @param linkEntries input linkEntries * @return sorted linkEntries */ private Collection<LinkEntry> getLinkEntries(List<LinkEntry> linkEntries) { Set<LinkEntry> sortedLinkEntries = new TreeSet<>(new Comparator<LinkEntry>() { @Override public int compare(LinkEntry o1, LinkEntry o2) { if (o1 == null) { return -1; } if (o2 == null) { return 1; } String src1 = o1.getSrc(); String src2= o2.getSrc(); return src1.compareTo(src2); } }); sortedLinkEntries.addAll(linkEntries); return sortedLinkEntries; } private void checkMntEntryKeyEqualsTarget( String mntEntryKey, String targetMntEntryKey) throws IOException { if (!mntEntryKey.equals(targetMntEntryKey)) { throw new IOException("ViewFs: Mount points initialization error." + " Invalid " + targetMntEntryKey + " entry in config: " + mntEntryKey); } } private void addRegexMountEntry(LinkEntry le) throws IOException { LOGGER.info("Add regex mount point:" + le.getSrc() + ", target:" + le.getTarget() + ", interceptor settings:" + le.getSettings()); RegexMountPoint regexMountPoint = new RegexMountPoint<T>( this, le.getSrc(), le.getTarget(), le.getSettings()); regexMountPoint.initialize(); regexMountPointList.add(regexMountPoint); } private LinkEntry buildLinkRegexEntry( Configuration config, UserGroupInformation ugi, String mntEntryStrippedKey, String mntEntryValue) { String linkKeyPath = null; String settings = null; final String linkRegexPrefix = Constants.CONFIG_VIEWFS_LINK_REGEX + "."; // settings#.linkKey String settingsAndLinkKeyPath = mntEntryStrippedKey.substring(linkRegexPrefix.length()); int settingLinkKeySepIndex = settingsAndLinkKeyPath .indexOf(RegexMountPoint.SETTING_SRCREGEX_SEP); if (settingLinkKeySepIndex == -1) { // There's no settings linkKeyPath = settingsAndLinkKeyPath; settings = null; } else { // settings#.linkKey style 
configuration // settings from settings#.linkKey settings = settingsAndLinkKeyPath.substring(0, settingLinkKeySepIndex); // linkKeyPath linkKeyPath = settingsAndLinkKeyPath.substring( settings.length() + RegexMountPoint.SETTING_SRCREGEX_SEP .length()); } return new LinkEntry( linkKeyPath, mntEntryValue, LinkType.REGEX, settings, ugi, config); } /** * Resolve returns ResolveResult. * The caller can continue the resolution of the remainingPath * in the targetFileSystem. * * If the input pathname leads to link to another file system then * the targetFileSystem is the one denoted by the link (except it is * file system chrooted to link target. * If the input pathname leads to an internal mount-table entry then * the target file system is one that represents the internal inode. */ public static
LinkEntry
java
spring-projects__spring-boot
module/spring-boot-r2dbc/src/main/java/org/springframework/boot/r2dbc/testcontainers/OracleXeR2dbcContainerConnectionDetailsFactory.java
{ "start": 1418, "end": 1991 }
class ____ extends ContainerConnectionDetailsFactory<OracleContainer, R2dbcConnectionDetails> { OracleXeR2dbcContainerConnectionDetailsFactory() { super(ANY_CONNECTION_NAME, "io.r2dbc.spi.ConnectionFactoryOptions"); } @Override public R2dbcConnectionDetails getContainerConnectionDetails(ContainerConnectionSource<OracleContainer> source) { return new R2dbcDatabaseContainerConnectionDetails(source); } /** * {@link R2dbcConnectionDetails} backed by a {@link ContainerConnectionSource}. */ private static final
OracleXeR2dbcContainerConnectionDetailsFactory
java
alibaba__nacos
naming/src/test/java/com/alibaba/nacos/naming/utils/ServiceUtilTest.java
{ "start": 811, "end": 1233 }
class ____ { @Test void testSelectInstances() { ServiceInfo serviceInfo = new ServiceInfo(); serviceInfo.setGroupName("groupName"); serviceInfo.setName("serviceName"); serviceInfo.setChecksum("checkSum"); serviceInfo.setAllIps(false); ServiceInfo cluster = ServiceUtil.selectInstances(serviceInfo, "cluster"); assertNotNull(cluster); } }
ServiceUtilTest
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/metamodel/mapping/internal/DiscriminatorValueDetailsImpl.java
{ "start": 311, "end": 813 }
class ____ implements DiscriminatorValueDetails { private final Object value; private final EntityMappingType matchedEntityDescriptor; public DiscriminatorValueDetailsImpl(Object value, EntityMappingType matchedEntityDescriptor) { this.value = value; this.matchedEntityDescriptor = matchedEntityDescriptor; } @Override public Object getValue() { return value; } @Override public EntityMappingType getIndicatedEntity() { return matchedEntityDescriptor; } }
DiscriminatorValueDetailsImpl
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/ImpossibleNullComparisonTest.java
{ "start": 1600, "end": 2609 }
class ____ { void test() { TestProtoMessage message = TestProtoMessage.newBuilder().build(); // BUG: Diagnostic contains: message.hasMessage() if (message.getMessage() != null) {} // BUG: Diagnostic contains: !message.hasMessage() if (message.getMessage() == null) {} // BUG: Diagnostic contains: message.hasMessage() if (null != message.getMessage()) {} // BUG: Diagnostic contains: message.getMessage().hasField() if (message.getMessage().getField() != null) {} } } """) .doTest(); } @Test public void listCases() { compilationHelper .addSourceLines( "Test.java", """ import com.google.errorprone.bugpatterns.proto.ProtoTest.TestProtoMessage; import com.google.errorprone.bugpatterns.proto.ProtoTest.TestFieldProtoMessage;
Test
java
netty__netty
resolver/src/test/java/io/netty/resolver/HostsFileEntriesProviderTest.java
{ "start": 1175, "end": 6110 }
class ____ { @Test void testParse() throws IOException { String hostsString = new StringBuilder() .append("127.0.0.1 host1").append("\n") // single hostname, separated with blanks .append("::1 host1").append("\n") // same as above, but IPv6 .append("\n") // empty line .append("192.168.0.1\thost2").append("\n") // single hostname, separated with tabs .append("#comment").append("\n") // comment at the beginning of the line .append(" #comment ").append("\n") // comment in the middle of the line .append("192.168.0.2 host3 #comment").append("\n") // comment after hostname .append("192.168.0.3 host4 host5 host6").append("\n") // multiple aliases .append("192.168.0.4 host4").append("\n") // host mapped to a second address, must be considered .append("192.168.0.5 HOST7").append("\n") // uppercase host, should match lowercase host .append("192.168.0.6 host7").append("\n") // must be considered .toString(); HostsFileEntriesProvider entries = HostsFileEntriesProvider.parser() .parse(new BufferedReader(new StringReader(hostsString))); Map<String, List<InetAddress>> inet4Entries = entries.ipv4Entries(); Map<String, List<InetAddress>> inet6Entries = entries.ipv6Entries(); assertEquals(7, inet4Entries.size(), "Expected 7 IPv4 entries"); assertEquals(1, inet6Entries.size(), "Expected 1 IPv6 entries"); assertEquals(1, inet4Entries.get("host1").size()); assertEquals("127.0.0.1", inet4Entries.get("host1").get(0).getHostAddress()); assertEquals(1, inet4Entries.get("host2").size()); assertEquals("192.168.0.1", inet4Entries.get("host2").get(0).getHostAddress()); assertEquals(1, inet4Entries.get("host3").size()); assertEquals("192.168.0.2", inet4Entries.get("host3").get(0).getHostAddress()); assertEquals(2, inet4Entries.get("host4").size()); assertEquals("192.168.0.3", inet4Entries.get("host4").get(0).getHostAddress()); assertEquals("192.168.0.4", inet4Entries.get("host4").get(1).getHostAddress()); assertEquals(1, inet4Entries.get("host5").size()); assertEquals("192.168.0.3", 
inet4Entries.get("host5").get(0).getHostAddress()); assertEquals(1, inet4Entries.get("host6").size()); assertEquals("192.168.0.3", inet4Entries.get("host6").get(0).getHostAddress()); assertNotNull(inet4Entries.get("host7"), "Uppercase host doesn't resolve"); assertEquals(2, inet4Entries.get("host7").size()); assertEquals("192.168.0.5", inet4Entries.get("host7").get(0).getHostAddress()); assertEquals("192.168.0.6", inet4Entries.get("host7").get(1).getHostAddress()); assertEquals(1, inet6Entries.get("host1").size()); assertEquals("0:0:0:0:0:0:0:1", inet6Entries.get("host1").get(0).getHostAddress()); } @Test void testCharsetInputValidation() { assertThrows(NullPointerException.class, new Executable() { @Override public void execute() throws IOException { HostsFileEntriesProvider.parser().parse((Charset[]) null); } }); assertThrows(NullPointerException.class, new Executable() { @Override public void execute() throws IOException { HostsFileEntriesProvider.parser().parse(new File(""), (Charset[]) null); } }); assertThrows(NullPointerException.class, new Executable() { @Override public void execute() { HostsFileEntriesProvider.parser().parseSilently((Charset[]) null); } }); assertThrows(NullPointerException.class, new Executable() { @Override public void execute() { HostsFileEntriesProvider.parser().parseSilently(new File(""), (Charset[]) null); } }); } @Test void testFileInputValidation() { assertThrows(NullPointerException.class, new Executable() { @Override public void execute() throws IOException { HostsFileEntriesProvider.parser().parse((File) null); } }); assertThrows(NullPointerException.class, new Executable() { @Override public void execute() { HostsFileEntriesProvider.parser().parseSilently((File) null); } }); } @Test void testReaderInputValidation() { assertThrows(NullPointerException.class, new Executable() { @Override public void execute() throws IOException { HostsFileEntriesProvider.parser().parse((Reader) null); } }); } }
HostsFileEntriesProviderTest
java
mockito__mockito
mockito-core/src/main/java/org/mockito/internal/debugging/LocationFactory.java
{ "start": 620, "end": 1316 }
interface ____ { Location create(boolean inline); } private static Factory createLocationFactory() { if (Platform.isAndroid() && !AndroidPlatform.isStackWalkerUsable()) { return new Java8LocationFactory(); } try { // On some platforms, like Android, the StackWalker APIs may not be // available, in this case we have to fallback to Java 8 style of stack // traversing. Class.forName("java.lang.StackWalker"); return new DefaultLocationFactory(); } catch (ClassNotFoundException e) { return new Java8LocationFactory(); } } private static final
Factory
java
elastic__elasticsearch
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformProgressTests.java
{ "start": 727, "end": 3182 }
class ____ extends AbstractSerializingTransformTestCase<TransformProgress> { public static TransformProgress randomTransformProgress() { return new TransformProgress( randomBoolean() ? null : randomLongBetween(0, 10000), randomBoolean() ? null : randomLongBetween(0, 10000), randomBoolean() ? null : randomLongBetween(1, 10000) ); } @Override protected TransformProgress doParseInstance(XContentParser parser) throws IOException { return TransformProgress.PARSER.apply(parser, null); } @Override protected TransformProgress createTestInstance() { return randomTransformProgress(); } @Override protected TransformProgress mutateInstance(TransformProgress instance) { return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929 } @Override protected Reader<TransformProgress> instanceReader() { return TransformProgress::new; } public void testPercentComplete() { TransformProgress progress = new TransformProgress(0L, 100L, null); assertThat(progress.getPercentComplete(), equalTo(100.0)); progress = new TransformProgress(100L, 0L, null); assertThat(progress.getPercentComplete(), equalTo(0.0)); progress = new TransformProgress(100L, 10000L, null); assertThat(progress.getPercentComplete(), equalTo(100.0)); progress = new TransformProgress(100L, null, null); assertThat(progress.getPercentComplete(), equalTo(0.0)); progress = new TransformProgress(100L, 50L, null); assertThat(progress.getPercentComplete(), closeTo(50.0, 0.000001)); progress = new TransformProgress(null, 50L, 10L); assertThat(progress.getPercentComplete(), is(nullValue())); } public void testConstructor() { IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> new TransformProgress(-1L, null, null)); assertThat(ex.getMessage(), equalTo("[total_docs] must be >0.")); ex = expectThrows(IllegalArgumentException.class, () -> new TransformProgress(1L, -1L, null)); assertThat(ex.getMessage(), equalTo("[docs_processed] must be >0.")); ex = 
expectThrows(IllegalArgumentException.class, () -> new TransformProgress(1L, 1L, -1L)); assertThat(ex.getMessage(), equalTo("[docs_indexed] must be >0.")); } }
TransformProgressTests
java
google__guava
android/guava-tests/test/com/google/common/util/concurrent/JSR166TestCase.java
{ "start": 31665, "end": 31888 }
class ____ extends CheckedRunnable { @Override protected void realRun() { try { delay(SMALL_DELAY_MS); } catch (InterruptedException ok) { } } } public
SmallPossiblyInterruptedRunnable
java
google__guice
core/test/com/google/inject/CircularDependencyTest.java
{ "start": 18631, "end": 18730 }
class ____ implements I { @Inject IImpl(HImpl i, J j) {} } @SimpleSingleton static
IImpl
java
apache__hadoop
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Chunk.java
{ "start": 1326, "end": 5492 }
class ____ extends InputStream { private DataInputStream in = null; private boolean lastChunk; private int remain = 0; private boolean closed; public ChunkDecoder() { lastChunk = true; closed = true; } public void reset(DataInputStream downStream) { // no need to wind forward the old input. in = downStream; lastChunk = false; remain = 0; closed = false; } /** * Constructor * * @param in * The source input stream which contains chunk-encoded data * stream. */ public ChunkDecoder(DataInputStream in) { this.in = in; lastChunk = false; closed = false; } /** * Have we reached the last chunk. * * @return true if we have reached the last chunk. * @throws java.io.IOException */ public boolean isLastChunk() throws IOException { checkEOF(); return lastChunk; } /** * How many bytes remain in the current chunk? * * @return remaining bytes left in the current chunk. * @throws java.io.IOException */ public int getRemain() throws IOException { checkEOF(); return remain; } /** * Reading the length of next chunk. * * @throws java.io.IOException * when no more data is available. */ private void readLength() throws IOException { remain = Utils.readVInt(in); if (remain >= 0) { lastChunk = true; } else { remain = -remain; } } /** * Check whether we reach the end of the stream. * * @return false if the chunk encoded stream has more data to read (in which * case available() will be greater than 0); true otherwise. * @throws java.io.IOException * on I/O errors. */ private boolean checkEOF() throws IOException { if (isClosed()) return true; while (true) { if (remain > 0) return false; if (lastChunk) return true; readLength(); } } @Override /* * This method never blocks the caller. Returning 0 does not mean we reach * the end of the stream. 
*/ public int available() { return remain; } @Override public int read() throws IOException { if (checkEOF()) return -1; int ret = in.read(); if (ret < 0) throw new IOException("Corrupted chunk encoding stream"); --remain; return ret; } @Override public int read(byte[] b) throws IOException { return read(b, 0, b.length); } @Override public int read(byte[] b, int off, int len) throws IOException { if ((off | len | (off + len) | (b.length - (off + len))) < 0) { throw new IndexOutOfBoundsException(); } if (!checkEOF()) { int n = Math.min(remain, len); int ret = in.read(b, off, n); if (ret < 0) throw new IOException("Corrupted chunk encoding stream"); remain -= ret; return ret; } return -1; } @Override public long skip(long n) throws IOException { if (!checkEOF()) { long ret = in.skip(Math.min(remain, n)); remain -= ret; return ret; } return 0; } @Override public boolean markSupported() { return false; } public boolean isClosed() { return closed; } @Override public void close() throws IOException { if (closed == false) { try { while (!checkEOF()) { skip(Integer.MAX_VALUE); } } finally { closed = true; } } } } /** * Chunk Encoder. Encoding the output data into a chain of chunks in the * following sequences: -len1, byte[len1], -len2, byte[len2], ... len_n, * byte[len_n]. Where len1, len2, ..., len_n are the lengths of the data * chunks. Non-terminal chunks have their lengths negated. Non-terminal chunks * cannot have length 0. All lengths are in the range of 0 to * Integer.MAX_VALUE and are encoded in Utils.VInt format. */ static public
ChunkDecoder
java
apache__flink
flink-dstl/flink-dstl-dfs/src/main/java/org/apache/flink/changelog/fs/DuplicatingOutputStreamWithPos.java
{ "start": 1540, "end": 6116 }
class ____ extends OutputStreamWithPos { private static final Logger LOG = LoggerFactory.getLogger(DuplicatingOutputStreamWithPos.class); private OutputStream secondaryStream; private OutputStream originalSecondaryStream; private final Path secondaryPath; /** * Stores a potential exception that occurred while interacting with {@link #secondaryStream}. */ private Exception secondaryStreamException; public DuplicatingOutputStreamWithPos( OutputStream primaryStream, Path primaryPath, OutputStream secondaryStream, Path secondaryPath) { super(primaryStream, primaryPath); this.secondaryStream = Preconditions.checkNotNull(secondaryStream); this.originalSecondaryStream = Preconditions.checkNotNull(secondaryStream); this.secondaryPath = Preconditions.checkNotNull(secondaryPath); } @Override public void wrap(boolean compression, int bufferSize) throws IOException { super.wrap(compression, bufferSize); this.secondaryStream = wrapInternal(compression, bufferSize, this.originalSecondaryStream); } @Override public void write(int b) throws IOException { outputStream.write(b); if (secondaryStreamException == null) { try { secondaryStream.write(b); } catch (Exception ex) { handleSecondaryStreamOnException(ex); } } pos++; } @Override public void write(byte[] b) throws IOException { outputStream.write(b); if (secondaryStreamException == null) { try { secondaryStream.write(b); } catch (Exception ex) { LOG.warn("Exception encountered during write to secondary stream"); handleSecondaryStreamOnException(ex); } } pos += b.length; } @Override public void write(byte[] b, int off, int len) throws IOException { outputStream.write(b, off, len); if (secondaryStreamException == null) { try { secondaryStream.write(b, off, len); } catch (Exception ex) { LOG.warn("Exception encountered during writing to secondary stream"); handleSecondaryStreamOnException(ex); } } pos += len; } @Override public void flush() throws IOException { outputStream.flush(); if (secondaryStreamException == null) { try { 
secondaryStream.flush(); } catch (Exception ex) { LOG.warn("Exception encountered during flushing secondary stream"); handleSecondaryStreamOnException(ex); } } } @Override public void close() throws IOException { Exception exCollector = null; try { super.close(); } catch (Exception closeEx) { exCollector = ExceptionUtils.firstOrSuppressed(closeEx, exCollector); } if (secondaryStreamException == null) { try { secondaryStream.close(); originalSecondaryStream.close(); } catch (Exception closeEx) { getSecondaryPath().getFileSystem().delete(getSecondaryPath(), true); handleSecondaryStreamOnException(closeEx); } } if (exCollector != null) { throw new IOException("Exception while closing duplicating stream.", exCollector); } } private void handleSecondaryStreamOnException(Exception ex) { Preconditions.checkState( secondaryStreamException == null, "Secondary stream already failed from previous exception!"); try { secondaryStream.close(); } catch (Exception closeEx) { ex = ExceptionUtils.firstOrSuppressed(closeEx, ex); } secondaryStreamException = Preconditions.checkNotNull(ex); } public Path getSecondaryPath() { return secondaryPath; } public StreamStateHandle getSecondaryHandle( BiFunction<Path, Long, StreamStateHandle> handleFactory) throws IOException { if (secondaryStreamException == null) { return handleFactory.apply(secondaryPath, this.pos); } else { throw new IOException( "Secondary stream previously failed exceptionally", secondaryStreamException); } } }
DuplicatingOutputStreamWithPos
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFSQueueMetrics.java
{ "start": 1750, "end": 1795 }
class ____ {@link FSQueueMetrics}. */ public
for
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java
{ "start": 5787, "end": 13528 }
class ____ { final Task task; final Request request; final ActionListener<Response> listener; final ClusterState clusterState; final DiscoveryNodes nodes; final List<? extends ShardIterator> shardsIts; final int expectedOps; final AtomicInteger counterOps = new AtomicInteger(); // ShardResponse or Exception protected final AtomicReferenceArray<Object> shardsResponses; protected AsyncBroadcastAction(Task task, Request request, ActionListener<Response> listener) { this.task = task; this.request = request; this.listener = listener; clusterState = clusterService.state(); ClusterBlockException blockException = checkGlobalBlock(clusterState, request); if (blockException != null) { throw blockException; } // update to concrete indices String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(clusterState, request); blockException = checkRequestBlock(clusterState, request, concreteIndices); if (blockException != null) { throw blockException; } nodes = clusterState.nodes(); logger.trace("resolving shards based on cluster state version [{}]", clusterState.version()); shardsIts = shards(clusterState, request, concreteIndices); expectedOps = shardsIts.size(); shardsResponses = new AtomicReferenceArray<>(expectedOps); } public void start() { if (shardsIts.size() == 0) { // no shards ActionListener.completeWith(listener, () -> newResponse(request, new AtomicReferenceArray<ShardResponse>(0), clusterState)); return; } // count the local operations, and perform the non local ones int shardIndex = -1; for (final ShardIterator shardIt : shardsIts) { shardIndex++; final ShardRouting shard = shardIt.nextOrNull(); if (shard != null) { performOperation(shardIt, shard, shardIndex); } else { // really, no shards active in this group onOperation(null, shardIt, shardIndex, new NoShardAvailableActionException(shardIt.shardId())); } } } protected void performOperation(final ShardIterator shardIt, final ShardRouting shard, final int shardIndex) { if (shard == null) { // no more 
active shards... (we should not really get here, just safety) onOperation(null, shardIt, shardIndex, new NoShardAvailableActionException(shardIt.shardId())); } else { try { final ShardRequest shardRequest = newShardRequest(shardIt.size(), shard, request); shardRequest.setParentTask(clusterService.localNode().getId(), task.getId()); DiscoveryNode node = nodes.get(shard.currentNodeId()); if (node == null) { // no node connected, act as failure onOperation(shard, shardIt, shardIndex, new NoShardAvailableActionException(shardIt.shardId())); } else { sendShardRequest( node, shardRequest, ActionListener.wrap(r -> onOperation(shard, shardIndex, r), e -> onOperation(shard, shardIt, shardIndex, e)) ); } } catch (Exception e) { onOperation(shard, shardIt, shardIndex, e); } } } protected void sendShardRequest(DiscoveryNode node, ShardRequest shardRequest, ActionListener<ShardResponse> listener) { transportService.sendRequest( node, transportShardAction, shardRequest, new ActionListenerResponseHandler<>(listener, TransportBroadcastAction.this::readShardResponse, executor) ); } protected void onOperation(ShardRouting shard, int shardIndex, ShardResponse response) { logger.trace("received response for {}", shard); shardsResponses.set(shardIndex, response); if (expectedOps == counterOps.incrementAndGet()) { finishHim(); } } void onOperation(@Nullable ShardRouting shard, final ShardIterator shardIt, int shardIndex, Exception e) { // we set the shard failure always, even if its the first in the replication group, and the next one // will work (it will just override it...) setFailure(shardIt, shardIndex, e); ShardRouting nextShard = shardIt.nextOrNull(); if (nextShard != null) { if (e != null) { if (logger.isTraceEnabled()) { if (TransportActions.isShardNotAvailableException(e) == false) { logger.trace( () -> format( "%s: failed to execute [%s]", shard != null ? 
shard.shortSummary() : shardIt.shardId(), request ), e ); } } } performOperation(shardIt, nextShard, shardIndex); } else { if (logger.isDebugEnabled()) { if (e != null) { if (TransportActions.isShardNotAvailableException(e) == false) { logger.debug( () -> format( "%s: failed to execute [%s]", shard != null ? shard.shortSummary() : shardIt.shardId(), request ), e ); } } } if (expectedOps == counterOps.incrementAndGet()) { finishHim(); } } } protected void finishHim() { assert Transports.assertNotTransportThread("O(#shards) work must always fork to an appropriate executor"); ActionListener.completeWith(listener, () -> newResponse(request, shardsResponses, clusterState)); } void setFailure(ShardIterator shardIt, int shardIndex, Exception e) { if ((e instanceof BroadcastShardOperationFailedException) == false) { e = new BroadcastShardOperationFailedException(shardIt.shardId(), e); } Object response = shardsResponses.get(shardIndex); if (response == null) { // just override it and return shardsResponses.set(shardIndex, e); } if ((response instanceof Throwable) == false) { // we should never really get here... return; } // the failure is already present, try and not override it with an exception that is less meaningless // for example, getting illegal shard state if (TransportActions.isReadOverrideException(e)) { shardsResponses.set(shardIndex, e); } } } }
AsyncBroadcastAction
java
square__okhttp
samples/guide/src/main/java/okhttp3/recipes/CacheResponse.java
{ "start": 790, "end": 2472 }
class ____ { private final OkHttpClient client; public CacheResponse(File cacheDirectory) throws Exception { int cacheSize = 10 * 1024 * 1024; // 10 MiB Cache cache = new Cache(cacheDirectory, cacheSize); client = new OkHttpClient.Builder() .cache(cache) .build(); } public void run() throws Exception { Request request = new Request.Builder() .url("http://publicobject.com/helloworld.txt") .build(); String response1Body; try (Response response1 = client.newCall(request).execute()) { if (!response1.isSuccessful()) throw new IOException("Unexpected code " + response1); response1Body = response1.body().string(); System.out.println("Response 1 response: " + response1); System.out.println("Response 1 cache response: " + response1.cacheResponse()); System.out.println("Response 1 network response: " + response1.networkResponse()); } String response2Body; try (Response response2 = client.newCall(request).execute()) { if (!response2.isSuccessful()) throw new IOException("Unexpected code " + response2); response2Body = response2.body().string(); System.out.println("Response 2 response: " + response2); System.out.println("Response 2 cache response: " + response2.cacheResponse()); System.out.println("Response 2 network response: " + response2.networkResponse()); } System.out.println("Response 2 equals Response 1? " + response1Body.equals(response2Body)); } public static void main(String... args) throws Exception { new CacheResponse(new File("CacheResponse.tmp")).run(); } }
CacheResponse
java
apache__camel
core/camel-core-processor/src/main/java/org/apache/camel/processor/resequencer/SequenceSender.java
{ "start": 1019, "end": 1243 }
interface ____<E> { /** * Sends the given element. * * @param o a re-ordered element. * @throws Exception if delivery fails. */ void sendElement(E o) throws Exception; }
SequenceSender
java
apache__camel
components/camel-webhook/src/test/java/org/apache/camel/component/webhook/support/TestEndpoint.java
{ "start": 1367, "end": 4459 }
class ____ extends DefaultEndpoint implements WebhookCapableEndpoint { private static final List<String> DEFAULT_METHOD = Collections.unmodifiableList(Collections.singletonList("POST")); private Function<Processor, Processor> webhookHandler; private Runnable register; private Runnable unregister; private Supplier<List<String>> methods; private Supplier<Producer> producer; private Function<Processor, Consumer> consumer; private WebhookConfiguration webhookConfiguration; private boolean singleton; /** * For query parameter testing 1 */ private String foo; /** * For query parameter testing 2 */ private String bar; public TestEndpoint(String endpointUri, Component component) { super(endpointUri, component); } @Override public Processor createWebhookHandler(Processor next) { if (this.webhookHandler != null) { return this.webhookHandler.apply(next); } return next; } @Override public void registerWebhook() { if (this.register != null) { this.register.run(); } } @Override public void unregisterWebhook() { if (this.unregister != null) { this.unregister.run(); } } @Override public void setWebhookConfiguration(WebhookConfiguration webhookConfiguration) { this.webhookConfiguration = webhookConfiguration; } public WebhookConfiguration getWebhookConfiguration() { return webhookConfiguration; } @Override public List<String> getWebhookMethods() { return this.methods != null ? this.methods.get() : DEFAULT_METHOD; } @Override public Producer createProducer() { return this.producer != null ? this.producer.get() : null; } @Override public Consumer createConsumer(Processor processor) { return this.consumer != null ? 
this.consumer.apply(processor) : null; } @Override public boolean isSingleton() { return singleton; } public void setSingleton(boolean singleton) { this.singleton = singleton; } public void setWebhookHandler(Function<Processor, Processor> webhookHandler) { this.webhookHandler = webhookHandler; } public void setRegisterWebhook(Runnable register) { this.register = register; } public void setUnregisterWebhook(Runnable unregister) { this.unregister = unregister; } public void setWebhookMethods(Supplier<List<String>> methods) { this.methods = methods; } public void setWebhookProducer(Supplier<Producer> producer) { this.producer = producer; } public void setConsumer(Function<Processor, Consumer> consumer) { this.consumer = consumer; } public String getFoo() { return foo; } public void setFoo(String foo) { this.foo = foo; } public String getBar() { return bar; } public void setBar(String bar) { this.bar = bar; } }
TestEndpoint
java
apache__camel
components/camel-aws/camel-aws2-kinesis/src/test/java/org/apache/camel/component/aws2/kinesis/Kinesis2ConsumerHealthCustomClientIT.java
{ "start": 2160, "end": 4507 }
class ____ extends CamelTestSupport { @RegisterExtension public static AWSService service = AWSServiceFactory.createSingletonS3Service(); CamelContext context; @Override protected CamelContext createCamelContext() throws Exception { context = super.createCamelContext(); context.getPropertiesComponent().setLocation("ref:prop"); Kinesis2Component component = new Kinesis2Component(context); component.getConfiguration().setAmazonKinesisClient(AWSSDKClientUtils.newKinesisClient()); component.init(); context.addComponent("aws2-kinesis", component); HealthCheckRegistry registry = new DefaultHealthCheckRegistry(); registry.setCamelContext(context); Object hc = registry.resolveById("context"); registry.register(hc); hc = registry.resolveById("routes"); registry.register(hc); hc = registry.resolveById("consumers"); registry.register(hc); HealthCheckRepository hcr = (HealthCheckRepository) registry.resolveById("producers"); hcr.setEnabled(true); registry.register(hcr); context.getCamelContextExtension().addContextPlugin(HealthCheckRegistry.class, registry); return context; } @Override protected RouteBuilder createRouteBuilder() { return new RouteBuilder() { @Override public void configure() { from("aws2-kinesis://stream").startupOrder(2).log("${body}").routeId("test-health-it"); } }; } @Test @Order(1) public void testLiveness() { Collection<HealthCheck.Result> res = HealthCheckHelper.invokeLiveness(context); boolean up = res.stream().allMatch(r -> r.getState().equals(HealthCheck.State.UP)); Assertions.assertTrue(up, "liveness check"); } @Test @Order(2) public void testReadinessWhenDown() { // health-check readiness should be down await().atMost(20, TimeUnit.SECONDS).untilAsserted(() -> { Collection<HealthCheck.Result> res2 = HealthCheckHelper.invokeReadiness(context); boolean down = res2.stream().allMatch(r -> r.getState().equals(HealthCheck.State.DOWN)); Assertions.assertTrue(down, "liveness check"); }); } }
Kinesis2ConsumerHealthCustomClientIT
java
netty__netty
codec-dns/src/main/java/io/netty/handler/codec/dns/DefaultDnsOptEcsRecord.java
{ "start": 939, "end": 4479 }
class ____ extends AbstractDnsOptPseudoRrRecord implements DnsOptEcsRecord {
    private final int srcPrefixLength;
    private final byte[] address;

    /**
     * Creates a new instance.
     *
     * @param maxPayloadSize the suggested max payload size in bytes
     * @param extendedRcode the extended rcode
     * @param version the version
     * @param srcPrefixLength the prefix length
     * @param address the bytes of the {@link InetAddress} to use; must be 4 (IPv4) or 16 (IPv6) bytes long
     */
    public DefaultDnsOptEcsRecord(int maxPayloadSize, int extendedRcode, int version,
                                  int srcPrefixLength, byte[] address) {
        super(maxPayloadSize, extendedRcode, version);
        this.srcPrefixLength = srcPrefixLength;
        // Defensive copy: a later mutation of the caller's array must not change this record.
        this.address = verifyAddress(address).clone();
    }

    /**
     * Creates a new instance.
     *
     * @param maxPayloadSize the suggested max payload size in bytes
     * @param srcPrefixLength the prefix length
     * @param address the bytes of the {@link InetAddress} to use
     */
    public DefaultDnsOptEcsRecord(int maxPayloadSize, int srcPrefixLength, byte[] address) {
        this(maxPayloadSize, 0, 0, srcPrefixLength, address);
    }

    /**
     * Creates a new instance.
     *
     * @param maxPayloadSize the suggested max payload size in bytes
     * @param protocolFamily the {@link InternetProtocolFamily} to use. This should be the same as the one used to
     *                       send the query.
     * @deprecated use {@link DefaultDnsOptEcsRecord#DefaultDnsOptEcsRecord(int, SocketProtocolFamily)}
     */
    @Deprecated
    public DefaultDnsOptEcsRecord(int maxPayloadSize, InternetProtocolFamily protocolFamily) {
        this(maxPayloadSize, 0, 0, 0, protocolFamily.localhost().getAddress());
    }

    /**
     * Creates a new instance.
     *
     * @param maxPayloadSize the suggested max payload size in bytes
     * @param socketProtocolFamily the {@link SocketProtocolFamily} to use. This should be the same as the one used to
     *                             send the query.
     */
    public DefaultDnsOptEcsRecord(int maxPayloadSize, SocketProtocolFamily socketProtocolFamily) {
        this(maxPayloadSize, 0, 0, 0, localAddress(socketProtocolFamily));
    }

    private static byte[] localAddress(SocketProtocolFamily family) {
        switch (family) {
            case INET:
                return NetUtil.LOCALHOST4.getAddress();
            case INET6:
                return NetUtil.LOCALHOST6.getAddress();
            default:
                // Unknown family: verifyAddress(null) rejects this with IllegalArgumentException.
                return null;
        }
    }

    private static byte[] verifyAddress(byte[] bytes) {
        // Bug fix: the original condition "bytes != null && bytes.length == 4 || bytes.length == 16"
        // parsed as "(a && b) || c", so a null array was dereferenced via "bytes.length == 16" and
        // threw NullPointerException instead of the intended IllegalArgumentException.
        if (bytes != null && (bytes.length == 4 || bytes.length == 16)) {
            return bytes;
        }
        throw new IllegalArgumentException("bytes.length must be either 4 or 16");
    }

    @Override
    public int sourcePrefixLength() {
        return srcPrefixLength;
    }

    @Override
    public int scopePrefixLength() {
        // Always 0 in queries; the server fills in the scope in its response.
        return 0;
    }

    @Override
    public byte[] address() {
        // Return a copy so callers cannot mutate the record's internal state.
        return address.clone();
    }

    @Override
    public String toString() {
        StringBuilder sb = toStringBuilder();
        sb.setLength(sb.length() - 1);
        return sb.append(" address:")
                .append(Arrays.toString(address))
                .append(" sourcePrefixLength:")
                .append(sourcePrefixLength())
                .append(" scopePrefixLength:")
                .append(scopePrefixLength())
                .append(')').toString();
    }
}
DefaultDnsOptEcsRecord
java
grpc__grpc-java
okhttp/src/main/java/io/grpc/okhttp/OkHttpReadableBuffer.java
{ "start": 922, "end": 2574 }
class ____ extends AbstractReadableBuffer { private final okio.Buffer buffer; OkHttpReadableBuffer(okio.Buffer buffer) { this.buffer = buffer; } @Override public int readableBytes() { return (int) buffer.size(); } @Override public int readUnsignedByte() { try { fakeEofExceptionMethod(); // Okio 2.x can throw EOFException from readByte() return buffer.readByte() & 0x000000FF; } catch (EOFException e) { throw new IndexOutOfBoundsException(e.getMessage()); } } private void fakeEofExceptionMethod() throws EOFException {} @Override public void skipBytes(int length) { try { buffer.skip(length); } catch (EOFException e) { throw new IndexOutOfBoundsException(e.getMessage()); } } @Override public void readBytes(byte[] dest, int destOffset, int length) { while (length > 0) { int bytesRead = buffer.read(dest, destOffset, length); if (bytesRead == -1) { throw new IndexOutOfBoundsException("EOF trying to read " + length + " bytes"); } length -= bytesRead; destOffset += bytesRead; } } @Override public void readBytes(ByteBuffer dest) { // We are not using it. throw new UnsupportedOperationException(); } @Override public void readBytes(OutputStream dest, int length) throws IOException { buffer.writeTo(dest, length); } @Override public ReadableBuffer readBytes(int length) { okio.Buffer buf = new okio.Buffer(); buf.write(buffer, length); return new OkHttpReadableBuffer(buf); } @Override public void close() { buffer.clear(); } }
OkHttpReadableBuffer
java
apache__flink
flink-core/src/test/java/org/apache/flink/api/common/serialization/AbstractDeserializationSchemaTest.java
{ "start": 4715, "end": 4938 }
class ____<T> extends AbstractDeserializationSchema<T> {

    /** Always throws; this schema exists only for type-extraction tests and is never invoked. */
    @Override
    public T deserialize(byte[] message) {
        throw new UnsupportedOperationException();
    }
}

private static
GenericSchema
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldSyntheticWriterHelper.java
{ "start": 3218, "end": 5301 }
// Immutable decomposition of one flattened-field doc value into (prefix, leaf, value).
class ____ {
    // Sentinel for "no underlying BytesRef"; its value is null (see value()'s assert).
    public static final KeyValue EMPTY = new KeyValue(null, new Prefix(), null);
    private final String value;   // the field's value; null only for EMPTY
    private final Prefix prefix;  // all path components before the leaf
    private final String leaf;    // the last path component

    // Canonical constructor; callers guarantee the three parts are consistent.
    private KeyValue(final String value, final Prefix prefix, final String leaf) {
        this.value = value;
        this.prefix = prefix;
        this.leaf = leaf;
    }

    KeyValue(final BytesRef keyValue) {
        this(
            // Splitting with a negative limit includes trailing empty strings.
            // This is needed in case the provided path has trailing path separators.
            FlattenedFieldParser.extractKey(keyValue).utf8ToString().split(PATH_SEPARATOR_PATTERN, -1),
            FlattenedFieldParser.extractValue(keyValue).utf8ToString()
        );
    }

    // The leaf is the last path element; everything before it forms the prefix.
    private KeyValue(final String[] key, final String value) {
        this(value, new Prefix(key), key[key.length - 1]);
    }

    private static KeyValue fromBytesRef(final BytesRef keyValue) {
        return keyValue == null ? EMPTY : new KeyValue(keyValue);
    }

    public String leaf() {
        return this.leaf;
    }

    public String value() {
        assert this.value != null;
        return this.value;
    }

    @Override
    public int hashCode() {
        return Objects.hash(this.value, this.prefix, this.leaf);
    }

    // Compares only the path (prefix + leaf), ignoring the value.
    public boolean pathEquals(final KeyValue other) {
        return prefix.equals(other.prefix) && leaf.equals(other.leaf);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        KeyValue other = (KeyValue) obj;
        return Objects.equals(this.value, other.value)
            && Objects.equals(this.prefix, other.prefix)
            && Objects.equals(this.leaf, other.leaf);
    }
}

public
KeyValue
java
apache__camel
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/GithubComponentBuilderFactory.java
{ "start": 1357, "end": 1798 }
interface ____ { /** * GitHub (camel-github) * Interact with the GitHub API. * * Category: file,cloud,api * Since: 2.15 * Maven coordinates: org.apache.camel:camel-github * * @return the dsl builder */ static GithubComponentBuilder github() { return new GithubComponentBuilderImpl(); } /** * Builder for the GitHub component. */
GithubComponentBuilderFactory
java
apache__flink
flink-core/src/test/java/org/apache/flink/util/TemporaryClassLoaderContextTest.java
{ "start": 1090, "end": 1846 }
class ____ {

    @Test
    void testTemporaryClassLoaderContext() {
        // Remember the loader installed before the scoped swap.
        final ClassLoader originalLoader = Thread.currentThread().getContextClassLoader();
        final ChildFirstClassLoader scopedLoader =
                new ChildFirstClassLoader(
                        new URL[0], originalLoader, new String[0], NOOP_EXCEPTION_HANDLER);

        try (TemporaryClassLoaderContext ignored = TemporaryClassLoaderContext.of(scopedLoader)) {
            // Inside the scope, the thread's context loader must be the temporary one.
            assertThat(Thread.currentThread().getContextClassLoader())
                    .isEqualTo(scopedLoader);
        }

        // Closing the scope must restore the previous loader.
        assertThat(Thread.currentThread().getContextClassLoader()).isEqualTo(originalLoader);
    }
}
TemporaryClassLoaderContextTest
java
google__error-prone
core/src/test/java/com/google/errorprone/refaster/UMemberSelectTest.java
{ "start": 1005, "end": 2306 }
class ____ extends AbstractUTreeTest { @Test public void inline() { ULiteral fooLit = ULiteral.stringLit("foo"); UType type = mock(UType.class); UMemberSelect memberSelect = UMemberSelect.create(fooLit, "length", type); assertInlines("\"foo\".length", memberSelect); } @Test public void equality() { UType stringTy = UClassType.create("java.lang.String"); // int String.indexOf(int) UMethodType indexOfIntTy = UMethodType.create(UPrimitiveType.INT, UPrimitiveType.INT); // int String.indexOf(String) UMethodType indexOfStringTy = UMethodType.create(UPrimitiveType.INT, stringTy); UExpression fooLit = ULiteral.stringLit("foo"); UExpression barLit = ULiteral.stringLit("bar"); new EqualsTester() .addEqualityGroup(UMemberSelect.create(fooLit, "indexOf", indexOfIntTy)) .addEqualityGroup(UMemberSelect.create(fooLit, "indexOf", indexOfStringTy)) .addEqualityGroup(UMemberSelect.create(barLit, "indexOf", indexOfIntTy)) .testEquals(); } @Test public void serialization() { SerializableTester.reserializeAndAssert( UMemberSelect.create( ULiteral.stringLit("foo"), "indexOf", UMethodType.create(UPrimitiveType.INT, UPrimitiveType.INT))); } }
UMemberSelectTest
java
apache__hadoop
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/impl/SourceWrappedStatistics.java
{ "start": 1111, "end": 1455 }
class ____ implements IOStatisticsSource {

    /** The wrapped statistics instance, returned verbatim from {@link #getIOStatistics()}. */
    private final IOStatistics delegate;

    /**
     * Constructor.
     * @param source source of statistics.
     */
    public SourceWrappedStatistics(final IOStatistics source) {
        this.delegate = source;
    }

    @Override
    public IOStatistics getIOStatistics() {
        return delegate;
    }
}
SourceWrappedStatistics
java
apache__flink
flink-core/src/main/java/org/apache/flink/api/common/accumulators/IntMinimum.java
{ "start": 1001, "end": 2712 }
class ____ implements SimpleAccumulator<Integer> {

    private static final long serialVersionUID = 1L;

    // Running minimum; starts at MAX_VALUE so the first added value always replaces it.
    private int min = Integer.MAX_VALUE;

    public IntMinimum() {}

    public IntMinimum(int value) {
        this.min = value;
    }

    // ------------------------------------------------------------------------
    //  Accumulator
    // ------------------------------------------------------------------------

    /** Consider using {@link #add(int)} instead for primitive integer values */
    @Override
    public void add(Integer value) {
        add(value.intValue());
    }

    @Override
    public Integer getLocalValue() {
        return min;
    }

    @Override
    public void merge(Accumulator<Integer, Integer> other) {
        add(other.getLocalValue().intValue());
    }

    @Override
    public void resetLocal() {
        min = Integer.MAX_VALUE;
    }

    @Override
    public IntMinimum clone() {
        return new IntMinimum(min);
    }

    // ------------------------------------------------------------------------
    //  Primitive Specializations
    // ------------------------------------------------------------------------

    public void add(int value) {
        if (value < min) {
            min = value;
        }
    }

    public int getLocalValuePrimitive() {
        return min;
    }

    // ------------------------------------------------------------------------
    //  Utilities
    // ------------------------------------------------------------------------

    @Override
    public String toString() {
        return "IntMinimum " + min;
    }
}
IntMinimum
java
apache__hadoop
hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/TestRouterRpcMultiDestination.java
{ "start": 3956, "end": 4101 }
interface ____ the {@link getRouter()} implemented by * {@link RouterRpcServer}. */ @TestMethodOrder(MethodOrderer.OrderAnnotation.class) public
of
java
quarkusio__quarkus
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/alternatives/priority/AlternativePriorityAnnotationTest.java
{ "start": 1844, "end": 1932 }
interface ____ {

    /** Returns the ping response produced by the implementing bean. */
    String ping();
}

@ApplicationScoped
static
MyInterface
java
apache__camel
test-infra/camel-test-infra-tensorflow-serving/src/test/java/org/apache/camel/test/infra/tensorflow/serving/services/TensorFlowServingRemoteService.java
{ "start": 967, "end": 1666 }
class ____ implements TensorFlowServingService { @Override public void registerProperties() { // NO-OP } @Override public void initialize() { registerProperties(); } @Override public void shutdown() { // NO-OP } @Override public int grpcPort() { String value = System.getProperty(TensorFlowServingProperties.TENSORFLOW_SERVING_GRPC_PORT, "8500"); return Integer.parseInt(value); } @Override public int restPort() { String value = System.getProperty(TensorFlowServingProperties.TENSORFLOW_SERVING_REST_PORT, "8501"); return Integer.parseInt(value); } }
TensorFlowServingRemoteService