language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/jsontype/SealedTypesWithTypedDeserializationTest.java | {
"start": 1749,
"end": 2029
} | class ____ extends Animal
{
@JsonCreator
public Fish()
{
super(null);
}
}
// [databind#2467]: Allow missing "content" for as-array deserialization
@JsonDeserialize(using = NullAnimalDeserializer.class)
static final | Fish |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/time/TimeInStaticInitializerTest.java | {
"start": 1166,
"end": 1518
} | class ____ {
// BUG: Diagnostic contains:
private static final Instant NOW = Instant.now();
}
""")
.doTest();
}
@Test
public void negative_instanceField() {
helper
.addSourceLines(
"Test.java",
"""
import java.time.Instant;
| Test |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/event/AsyncDispatcher.java | {
"start": 2241,
"end": 2360
} | class ____ a thread pool can be used to dispatch the events.
*/
@SuppressWarnings("rawtypes")
@Public
@Evolving
public | and |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/onetomany/OneToManyTest.java | {
"start": 3202,
"end": 17812
} | class ____ implements SettingProvider.Provider<CollectionClassification> {
@Override
public CollectionClassification getSetting() {
return CollectionClassification.BAG;
}
}
@AfterEach
public void afterEach(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncateMappedObjects();
}
@Test
public void testColumnDefinitionPropagation(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
Politician casimir = new Politician();
casimir.setName( "Casimir" );
PoliticalParty dream = new PoliticalParty();
dream.setName( "Dream" );
dream.addPolitician( casimir );
session.persist( dream );
session.getTransaction().commit();
session.clear();
session.beginTransaction();
session.remove( session.find( PoliticalParty.class, dream.getName() ) );
}
);
}
@Test
public void testListWithBagSemanticAndOrderBy(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
City paris = new City();
paris.setName( "Paris" );
session.persist( paris );
Street rochechoir = new Street();
rochechoir.setStreetName( "Rochechoir" );
rochechoir.setCity( paris );
Street chmpsElysees = new Street();
chmpsElysees.setStreetName( "Champs Elysees" );
chmpsElysees.setCity( paris );
Street grandeArmee = new Street();
grandeArmee.setStreetName( "Grande Armee" );
grandeArmee.setCity( paris );
session.persist( rochechoir );
session.persist( chmpsElysees );
session.persist( grandeArmee );
paris.addMainStreet( chmpsElysees );
paris.addMainStreet( grandeArmee );
session.flush();
session.clear();
// Assert the primary key value relationship amongst the 3 streets...
assertThat( rochechoir.getId() ).isLessThan( chmpsElysees.getId() );
assertThat( chmpsElysees.getId() ).isLessThan( grandeArmee.getId() );
paris = session.find( City.class, paris.getId() );
// City.streets is defined to be ordered by name primarily...
assertThat( paris.getStreets().size() ).isEqualTo( 3 );
assertThat( paris.getStreets().get( 0 ).getStreetName() ).isEqualTo( chmpsElysees.getStreetName() );
assertThat( paris.getStreets().get( 1 ).getStreetName() ).isEqualTo( grandeArmee.getStreetName() );
// City.mainStreets is defined to be ordered by street id
List<Street> mainStreets = paris.getMainStreets();
assertThat( mainStreets.size() ).isEqualTo( 2 );
Integer previousId = -1;
for ( Street street : mainStreets ) {
assertThat( previousId ).isLessThan( street.getId() );
previousId = street.getId();
}
}
);
}
@Test
public void testUnidirectionalDefault(SessionFactoryScope scope) {
Trainer t = new Trainer();
t.setName( "First trainer" );
Tiger regularTiger = new Tiger();
regularTiger.setName( "Regular Tiger" );
Tiger whiteTiger = new Tiger();
whiteTiger.setName( "White Tiger" );
t.setTrainedTigers( new HashSet<>() );
scope.inTransaction(
session -> {
session.persist( t );
session.persist( regularTiger );
session.persist( whiteTiger );
t.getTrainedTigers().add( regularTiger );
t.getTrainedTigers().add( whiteTiger );
}
);
scope.inTransaction(
session -> {
Trainer trainer = session.find( Trainer.class, t.getId() );
assertThat( trainer ).isNotNull();
assertThat( trainer.getTrainedTigers() ).isNotNull();
assertThat( trainer.getTrainedTigers().size() ).isEqualTo( 2 );
}
);
assertThrows( ConstraintViolationException.class, () -> scope.inSession(
session -> {
Trainer trainer = new Trainer();
trainer.setName( "new trainer" );
trainer.setTrainedTigers( new HashSet<>() );
trainer.getTrainedTigers().add( whiteTiger );
try {
session.getTransaction().begin();
session.persist( trainer );
session.getTransaction().commit();
fail( "A one to many should not allow several trainer per Tiger" );
}
finally {
if ( session.getTransaction().isActive() ) {
session.getTransaction().rollback();
}
}
}
) );
}
@Test
public void testUnidirectionalExplicit(SessionFactoryScope scope) {
Trainer t = new Trainer();
t.setName( "First trainer" );
Monkey regularMonkey = new Monkey();
regularMonkey.setName( "Regular Monkey" );
Monkey miniMonkey = new Monkey();
miniMonkey.setName( "Mini Monkey" );
t.setTrainedMonkeys( new HashSet<>() );
scope.inTransaction(
session -> {
session.persist( t );
session.persist( regularMonkey );
session.persist( miniMonkey );
t.getTrainedMonkeys().add( regularMonkey );
t.getTrainedMonkeys().add( miniMonkey );
}
);
scope.inTransaction(
session -> {
Trainer trainer = session.find( Trainer.class, t.getId() );
assertThat( trainer ).isNotNull();
assertThat( trainer.getTrainedMonkeys() ).isNotNull();
assertThat( trainer.getTrainedMonkeys().size() ).isEqualTo( 2 );
//test suppression of trainer wo monkey
final Set<Monkey> monkeySet = new HashSet<>( trainer.getTrainedMonkeys() );
session.remove( trainer );
session.flush();
session.getTransaction().commit();
session.clear();
session.beginTransaction();
for ( Monkey m : monkeySet ) {
final Object managedMonkey = session.find( Monkey.class, m.getId() );
assertThat( managedMonkey )
.describedAs( "No trainers but monkeys should still be here" )
.isNotNull();
}
//clean up
for ( Monkey m : monkeySet ) {
final Object managedMonkey = session.find( Monkey.class, m.getId() );
session.remove( managedMonkey );
}
session.flush();
}
);
}
@Test
public void testFetching(SessionFactoryScope scope) {
Troop troop = new Troop();
Soldier rambo = new Soldier();
scope.inTransaction(
session -> {
troop.setName( "Final cut" );
Soldier vandamme = new Soldier();
vandamme.setName( "JC Vandamme" );
troop.addSoldier( vandamme );
rambo.setName( "Rambo" );
troop.addSoldier( rambo );
session.persist( troop );
}
);
scope.inTransaction(
session -> {
Troop t = session.find( Troop.class, troop.getId() );
assertThat( t.getSoldiers() ).isNotNull();
assertThat( Hibernate.isInitialized( t.getSoldiers() ) ).isFalse();
assertThat( t.getSoldiers().size() ).isEqualTo( 2 );
assertThat( t.getSoldiers().iterator().next().getName() ).isEqualTo( rambo.getName() );
}
);
scope.inTransaction(
session -> {
Troop t = session.createQuery( "from " + Troop.class.getName() + " as t where t.id = :id",
Troop.class )
.setParameter( "id", troop.getId() ).uniqueResult();
assertThat( Hibernate.isInitialized( t.getSoldiers() ) ).isFalse();
}
);
scope.inTransaction(
session -> {
Soldier r = session.find( Soldier.class, rambo.getId() );
assertThat( Hibernate.isInitialized( r.getTroop() ) ).isTrue();
}
);
scope.inTransaction(
session -> {
Soldier r = session.createQuery( "from " + Soldier.class.getName() + " as s where s.id = :rid",
Soldier.class )
.setParameter( "rid", rambo.getId() )
.uniqueResult();
assertThat( Hibernate.isInitialized( r.getTroop() ) )
.describedAs( "fetching strategy used when we do query" )
.isTrue();
}
);
}
@Test
public void testCascadeDeleteOrphan(SessionFactoryScope scope) {
Troop disney = new Troop();
Soldier mickey = new Soldier();
scope.inTransaction(
session -> {
disney.setName( "Disney" );
mickey.setName( "Mickey" );
disney.addSoldier( mickey );
session.persist( disney );
}
);
Troop troop = scope.fromTransaction(
session -> {
Troop t = session.find( Troop.class, disney.getId() );
t.getSoldiers().iterator().next();
return t;
}
);
troop.getSoldiers().clear();
scope.inTransaction(
session ->
session.merge( troop )
);
scope.inTransaction(
session -> {
Soldier soldier = session.find( Soldier.class, mickey.getId() );
assertThat( soldier )
.describedAs( "delete-orphan should work" )
.isNull();
session.remove( session.find( Troop.class, disney.getId() ) );
}
);
}
@Test
public void testCascadeDelete(SessionFactoryScope scope) {
Troop disney = new Troop();
Soldier mickey = new Soldier();
scope.inTransaction(
session -> {
disney.setName( "Disney" );
mickey.setName( "Mickey" );
disney.addSoldier( mickey );
session.persist( disney );
}
);
scope.inTransaction(
session -> {
Troop troop = session.find( Troop.class, disney.getId() );
session.remove( troop );
}
);
scope.inTransaction(
session -> {
Soldier soldier = session.find( Soldier.class, mickey.getId() );
assertThat( soldier )
.describedAs( "delete-orphan should work" )
.isNull();
}
);
}
@Test
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsCascadeDeleteCheck.class)
public void testCascadeDeleteWithUnidirectionalAssociation(SessionFactoryScope scope) {
OnDeleteUnidirectionalOneToManyChild child = new OnDeleteUnidirectionalOneToManyChild();
scope.inTransaction( session -> {
OnDeleteUnidirectionalOneToManyParent parent = new OnDeleteUnidirectionalOneToManyParent();
parent.children = Collections.singletonList( child );
session.persist( parent );
} );
scope.inTransaction( session ->
session.createMutationQuery( "delete from OnDeleteUnidirectionalOneToManyParent" ).executeUpdate()
);
scope.inTransaction( session -> {
OnDeleteUnidirectionalOneToManyChild e1 = session.find(
OnDeleteUnidirectionalOneToManyChild.class,
child.id );
assertThat( e1 ).describedAs( "delete cascade should work" ).isNull();
} );
}
@Test
public void testOnDeleteWithoutJoinColumn() {
StandardServiceRegistry serviceRegistry = ServiceRegistryUtil.serviceRegistry();
try {
AnnotationException e = assertThrows( AnnotationException.class,
() -> new MetadataSources( serviceRegistry )
.addAnnotatedClass( OnDeleteUnidirectionalOneToMany.class )
.addAnnotatedClass( ParentUnawareChild.class )
.getMetadataBuilder()
.build()
);
assertThat( e.getMessage() )
.contains( "is annotated '@OnDelete' and must explicitly specify a '@JoinColumn'" );
}
finally {
StandardServiceRegistryBuilder.destroy( serviceRegistry );
}
}
@Test
public void testSimpleOneToManySet(SessionFactoryScope scope) {
Customer customer = new Customer();
Ticket t2 = new Ticket();
scope.inTransaction(
session -> {
Ticket t = new Ticket();
t.setNumber( "33A" );
t2.setNumber( "234ER" );
session.persist( customer );
//s.persist(t);
SortedSet<Ticket> tickets = new TreeSet<>( new TicketComparator() );
tickets.add( t );
tickets.add( t2 );
customer.setTickets( tickets );
}
);
scope.inTransaction(
session -> {
Customer c = session.getReference( Customer.class, customer.getId() );
assertThat( c ).isNotNull();
assertThat( Hibernate.isInitialized( c.getTickets() ) ).isTrue();
assertThat( c.getTickets() ).isNotNull();
SortedSet<Ticket> tickets = c.getTickets();
assertThat( tickets.size() ).isGreaterThan( 0 );
assertThat( c.getTickets().first().getNumber() ).isEqualTo( t2.getNumber() );
}
);
}
@Test
public void testSimpleOneToManyCollection(SessionFactoryScope scope) {
Customer c = new Customer();
scope.inTransaction(
session -> {
Discount d = new Discount();
d.setDiscount( 10 );
List<Discount> discounts = new ArrayList<>();
discounts.add( d );
d.setOwner( c );
c.setDiscountTickets( discounts );
session.persist( c );
}
);
scope.inTransaction(
session -> {
Customer customer = session.getReference( Customer.class, c.getId() );
assertThat( customer ).isNotNull();
assertThat( Hibernate.isInitialized( customer.getDiscountTickets() ) )
.isFalse();
assertThat( customer.getDiscountTickets() ).isNotNull();
Collection<Discount> collecDiscount = customer.getDiscountTickets();
assertThat( collecDiscount.size() ).isGreaterThan( 0 );
}
);
}
@Test
public void testJoinColumns(SessionFactoryScope scope) {
Parent p = new Parent();
ParentPk pk = new ParentPk();
pk.firstName = "Bruce";
pk.lastName = "Willis";
pk.isMale = true;
p.id = pk;
p.age = 40;
Child child = new Child();
Child child2 = new Child();
p.addChild( child );
p.addChild( child2 );
scope.inTransaction(
session ->
session.persist( p )
);
assertThat( child.id ).isNotNull();
assertThat( child2.id ).isNotNull();
assertThat( child.id ).isNotSameAs( child2.id );
scope.inTransaction(
session -> {
Parent parent = session.find( Parent.class, pk );
assertThat( parent.children ).isNotNull();
Hibernate.initialize( parent.children );
assertThat( parent.children.size() ).isEqualTo( 2 );
}
);
}
@Test
@JiraKey(value = "HHH-4394")
public void testOrderByOnSuperclassProperty(SessionFactoryScope scope) {
OrganisationUser user = new OrganisationUser();
user.setFirstName( "Emmanuel" );
user.setLastName( "Bernard" );
user.setIdPerson( 1L );
user.setSomeText( "SomeText" );
Organisation org = new Organisation();
org.setIdOrganisation( 1L );
org.setName( "S Diego Zoo" );
user.setOrganisation( org );
scope.inTransaction(
session -> {
session.persist( user );
session.persist( org );
session.flush();
session.clear();
session.createQuery( "select org from Organisation org left join fetch org.organisationUsers",
Organisation.class )
.list();
}
);
}
@Test
@JiraKey(value = "HHH-4605")
public void testJoinColumnConfiguredInXml(SessionFactoryScope scope) {
PersistentClass pc = scope.getMetadataImplementor().getEntityBinding( Model.class.getName() );
Table table = pc.getRootTable();
boolean joinColumnFound = false;
for ( Column column : table.getColumns() ) {
if ( column.getName().equals( "model_manufacturer_join" ) ) {
joinColumnFound = true;
break;
}
}
assertThat( joinColumnFound )
.describedAs( "The mapping defines a joing column which could not be found in the metadata." )
.isTrue();
}
@Entity(name = "OnDeleteUnidirectionalOneToManyParent")
@jakarta.persistence.Table(name = "OneToManyParent")
public static | ListSemanticProvider |
java | apache__camel | components/camel-hashicorp-vault/src/generated/java/org/apache/camel/component/hashicorp/vault/HashicorpVaultEndpointUriFactory.java | {
"start": 525,
"end": 2539
} | class ____ extends org.apache.camel.support.component.EndpointUriFactorySupport implements EndpointUriFactory {
private static final String BASE = ":secretsEngine";
private static final Set<String> PROPERTY_NAMES;
private static final Set<String> SECRET_PROPERTY_NAMES;
private static final Map<String, String> MULTI_VALUE_PREFIXES;
static {
Set<String> props = new HashSet<>(11);
props.add("cloud");
props.add("host");
props.add("lazyStartProducer");
props.add("namespace");
props.add("operation");
props.add("port");
props.add("scheme");
props.add("secretPath");
props.add("secretsEngine");
props.add("token");
props.add("vaultTemplate");
PROPERTY_NAMES = Collections.unmodifiableSet(props);
Set<String> secretProps = new HashSet<>(1);
secretProps.add("token");
SECRET_PROPERTY_NAMES = Collections.unmodifiableSet(secretProps);
MULTI_VALUE_PREFIXES = Collections.emptyMap();
}
@Override
public boolean isEnabled(String scheme) {
return "hashicorp-vault".equals(scheme);
}
@Override
public String buildUri(String scheme, Map<String, Object> properties, boolean encode) throws URISyntaxException {
String syntax = scheme + BASE;
String uri = syntax;
Map<String, Object> copy = new HashMap<>(properties);
uri = buildPathParameter(syntax, uri, "secretsEngine", null, false, copy);
uri = buildQueryParameters(uri, copy, encode);
return uri;
}
@Override
public Set<String> propertyNames() {
return PROPERTY_NAMES;
}
@Override
public Set<String> secretPropertyNames() {
return SECRET_PROPERTY_NAMES;
}
@Override
public Map<String, String> multiValuePrefixes() {
return MULTI_VALUE_PREFIXES;
}
@Override
public boolean isLenientProperties() {
return false;
}
}
| HashicorpVaultEndpointUriFactory |
java | quarkusio__quarkus | integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/KubernetesInitContainersTest.java | {
"start": 594,
"end": 3366
} | class ____ {
@RegisterExtension
static final QuarkusProdModeTest config = new QuarkusProdModeTest()
.withApplicationRoot((jar) -> jar.addClasses(GreetingResource.class))
.setApplicationName("kubernetes-with-init-containers")
.setApplicationVersion("0.1-SNAPSHOT")
.withConfigurationResource("kubernetes-with-init-containers.properties")
.setForcedDependencies(List.of(Dependency.of("io.quarkus", "quarkus-smallrye-health", Version.getVersion())));
@ProdBuildResults
private ProdModeTestResults prodModeTestResults;
@Test
public void assertGeneratedResources() throws IOException {
Path kubernetesDir = prodModeTestResults.getBuildDir().resolve("kubernetes");
assertThat(kubernetesDir)
.isDirectoryContaining(p -> p.getFileName().endsWith("kubernetes.json"))
.isDirectoryContaining(p -> p.getFileName().endsWith("kubernetes.yml"));
List<HasMetadata> kubernetesList = DeserializationUtil
.deserializeAsList(kubernetesDir.resolve("kubernetes.yml"));
assertThat(kubernetesList).filteredOn(i -> "Deployment".equals(i.getKind())).singleElement().satisfies(i -> {
assertThat(i).isInstanceOfSatisfying(Deployment.class, d -> {
assertThat(d.getSpec()).satisfies(deploymentSpec -> {
assertThat(deploymentSpec.getReplicas()).isEqualTo(1);
assertThat(deploymentSpec.getTemplate()).satisfies(t -> {
assertThat(t.getSpec()).satisfies(podSpec -> {
assertThat(podSpec.getServiceAccount()).isNull();
assertThat(podSpec.getInitContainers()).singleElement().satisfies(container -> {
assertThat(container.getImage()).isEqualTo("busybox:1.28");
assertThat(container.getCommand()).containsExactly("sh", "-c", "echo",
"The init container is running!");
assertThat(container.getReadinessProbe()).isNull();
assertThat(container.getLivenessProbe()).isNull();
assertThat(container.getLifecycle()).isNull();
});
assertThat(podSpec.getContainers()).singleElement().satisfies(container -> {
assertThat(container.getReadinessProbe()).isNotNull();
assertThat(container.getLivenessProbe()).isNotNull();
});
});
});
});
});
});
}
}
| KubernetesInitContainersTest |
java | elastic__elasticsearch | x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtAlgorithmValidator.java | {
"start": 548,
"end": 1434
} | class ____ implements JwtFieldValidator {
private final List<String> allowedAlgorithms;
public JwtAlgorithmValidator(List<String> allowedAlgorithms) {
this.allowedAlgorithms = allowedAlgorithms;
}
public void validate(JWSHeader jwsHeader, JWTClaimsSet jwtClaimsSet) {
final JWSAlgorithm algorithm = jwsHeader.getAlgorithm();
if (algorithm == null) {
throw new IllegalArgumentException("missing JWT algorithm header");
}
if (false == allowedAlgorithms.contains(algorithm.getName())) {
throw new IllegalArgumentException(
format(
"invalid JWT algorithm [%s], allowed algorithms are [%s]",
algorithm,
Strings.collectionToCommaDelimitedString(allowedAlgorithms)
)
);
}
}
}
| JwtAlgorithmValidator |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/StringFormatWithLiteralTest.java | {
"start": 1833,
"end": 2255
} | class ____ {
String test() {
return String.format("Formatting this float: %f", 101.0);
}
}
""")
.doTest();
}
@Test
public void negativeStringFormatWithVariableAsFormatString() {
compilationHelper
.addSourceLines(
"ExampleClass.java",
"""
import java.lang.String;
public | ExampleClass |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/asm/ClassWriter.java | {
"start": 38353,
"end": 39465
} | class ____ or adapters.</i>
*
* @param tag the kind of this handle. Must be {@link Opcodes#H_GETFIELD}, {@link
* Opcodes#H_GETSTATIC}, {@link Opcodes#H_PUTFIELD}, {@link Opcodes#H_PUTSTATIC}, {@link
* Opcodes#H_INVOKEVIRTUAL}, {@link Opcodes#H_INVOKESTATIC}, {@link Opcodes#H_INVOKESPECIAL},
* {@link Opcodes#H_NEWINVOKESPECIAL} or {@link Opcodes#H_INVOKEINTERFACE}.
* @param owner the internal name of the field or method owner class (see {@link
* Type#getInternalName()}).
* @param name the name of the field or method.
* @param descriptor the descriptor of the field or method.
* @param isInterface true if the owner is an interface.
* @return the index of a new or already existing method type reference item.
*/
public int newHandle(
final int tag,
final String owner,
final String name,
final String descriptor,
final boolean isInterface) {
return symbolTable.addConstantMethodHandle(tag, owner, name, descriptor, isInterface).index;
}
/**
* Adds a dynamic constant reference to the constant pool of the | generators |
java | apache__maven | its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng0773SettingsProfileReactorPollutionTest.java | {
"start": 1034,
"end": 1752
} | class ____ extends AbstractMavenIntegrationTestCase {
/**
* Verify that profiles from settings.xml do not pollute module lists across projects in a reactorized build.
*
* @throws Exception in case of failure
*/
@Test
public void testitMNG773() throws Exception {
File testDir = extractResources("/mng-0773");
Verifier verifier = newVerifier(testDir.getAbsolutePath());
verifier.setAutoclean(false);
verifier.addCliArgument("--settings");
verifier.addCliArgument("settings.xml");
verifier.addCliArgument("validate");
verifier.execute();
verifier.verifyErrorFreeLog();
}
}
| MavenITmng0773SettingsProfileReactorPollutionTest |
java | quarkusio__quarkus | extensions/oidc-client-filter/deployment/src/test/java/io/quarkus/oidc/client/filter/OidcClientFilterRevokedAccessTokenDevModeTest.java | {
"start": 4335,
"end": 4569
} | interface ____ {
@OidcClientFilter(NAMED_CLIENT)
@POST
String revokeAccessTokenAndRespond(String named);
}
@RegisterRestClient
@Path(MY_SERVER_RESOURCE_PATH)
public | MyNamedClient_AnnotationOnMethod |
java | quarkusio__quarkus | core/runtime/src/main/java/io/quarkus/runtime/graal/DisabledSSLContext.java | {
"start": 567,
"end": 787
} | class ____ extends SSLContext {
public DisabledSSLContext() {
super(new DisabledSSLContextSpi(), new Provider("DISABLED", "1.0", "DISABLED") {
}, "DISABLED");
}
private static | DisabledSSLContext |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/dynamic/support/ResolvableType.java | {
"start": 46513,
"end": 47012
} | interface ____ extends Serializable {
/**
* Return the source of the resolver (used for hashCode and equals).
*/
Object getSource();
/**
* Resolve the specified variable.
*
* @param variable the variable to resolve
* @return the resolved variable, or {@code null} if not found
*/
ResolvableType resolveVariable(TypeVariable<?> variable);
}
@SuppressWarnings("serial")
private | VariableResolver |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/DockerEndpointBuilderFactory.java | {
"start": 26897,
"end": 29420
} | interface ____ extends EndpointProducerBuilder {
default DockerEndpointProducerBuilder basic() {
return (DockerEndpointProducerBuilder) this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedDockerEndpointProducerBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedDockerEndpointProducerBuilder lazyStartProducer(String lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* The fully qualified | AdvancedDockerEndpointProducerBuilder |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/listeners/InvocationListener.java | {
"start": 386,
"end": 882
} | interface ____ {
/**
* Called after the invocation of the listener's mock if it returned normally.
*
* <p>
* Exceptions caused by this invocationListener will raise a {@link org.mockito.exceptions.base.MockitoException}.
* </p>
*
* @param methodInvocationReport Information about the method call that just happened.
*
* @see MethodInvocationReport
*/
void reportInvocation(MethodInvocationReport methodInvocationReport);
}
| InvocationListener |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/TestErrorTranslation.java | {
"start": 3844,
"end": 5680
} | class ____
* extracted.
*/
@Test
public void testUncheckedIOExceptionExtraction() throws Throwable {
intercept(SocketTimeoutException.class, "top",
() -> {
final SdkClientException thrown = sdkException("top",
sdkException("middle",
new UncheckedIOException(
new SocketTimeoutException("bottom"))));
throw maybeExtractIOException("p1",
new NoAwsCredentialsException("IamProvider", thrown.toString(), thrown), null);
});
}
@Test
public void testNoConstructorExtraction() throws Throwable {
intercept(PathIOException.class, NoConstructorIOE.MESSAGE,
() -> {
throw maybeExtractIOException("p1",
sdkException("top",
sdkException("middle",
new NoConstructorIOE())), null);
});
}
@Test
public void testEncryptionClientExceptionExtraction() throws Throwable {
intercept(NoSuchKeyException.class, () -> {
throw maybeProcessEncryptionClientException(
new S3EncryptionClientException("top",
new S3EncryptionClientException("middle", NoSuchKeyException.builder().build())));
});
}
@Test
public void testNonEncryptionClientExceptionExtraction() throws Throwable {
intercept(SdkException.class, () -> {
throw maybeProcessEncryptionClientException(
sdkException("top", sdkException("middle", NoSuchKeyException.builder().build())));
});
}
@Test
public void testEncryptionClientExceptionExtractionWithRTE() throws Throwable {
intercept(S3EncryptionClientException.class, () -> {
throw maybeProcessEncryptionClientException(
new S3EncryptionClientException("top", new UnsupportedOperationException()));
});
}
public static final | is |
java | google__dagger | javatests/dagger/internal/codegen/DelegateRequestRepresentationTest.java | {
"start": 8962,
"end": 9262
} | interface ____ {}");
Source subtype =
CompilerTests.javaSource(
"other.Subtype",
"package other;",
"",
"import javax.inject.Inject;",
"import javax.inject.Singleton;",
"",
"@Singleton",
" | Supertype |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/OptionalNotPresent.java | {
"start": 3265,
"end": 6303
} | class ____ extends SuppressibleTreePathScanner<Void, Void> {
private final Multiset<ConstantExpression> truths = HashMultiset.create();
private final Multiset<ConstantExpression> falsehoods = HashMultiset.create();
private final VisitorState state;
private IfScanner(VisitorState state) {
super(state);
this.state = state;
}
@Override
public Void visitIf(IfTree tree, Void unused) {
withinScope(
constantExpressions.truthiness(tree.getCondition(), /* not= */ false, state),
tree.getThenStatement());
withinScope(
constantExpressions.truthiness(tree.getCondition(), /* not= */ true, state),
tree.getElseStatement());
return null;
}
@Override
public Void visitConditionalExpression(ConditionalExpressionTree tree, Void unused) {
withinScope(
constantExpressions.truthiness(tree.getCondition(), /* not= */ false, state),
tree.getTrueExpression());
withinScope(
constantExpressions.truthiness(tree.getCondition(), /* not= */ true, state),
tree.getFalseExpression());
return null;
}
private void withinScope(Truthiness truthiness, Tree tree) {
truths.addAll(truthiness.requiredTrue());
falsehoods.addAll(truthiness.requiredFalse());
scan(tree, null);
removeOccurrences(truths, truthiness.requiredTrue());
removeOccurrences(falsehoods, truthiness.requiredFalse());
}
@Override
public Void visitMethodInvocation(MethodInvocationTree tree, Void unused) {
if (OPTIONAL_GET.matches(tree, state)) {
var receiver = getReceiver(tree);
if (receiver != null) {
constantExpressions
.constantExpression(receiver, state)
.ifPresent(o -> checkForEmptiness(tree, o));
}
}
return super.visitMethodInvocation(tree, null);
}
private void checkForEmptiness(
MethodInvocationTree tree, ConstantExpression constantExpression) {
if (getMethodInvocations(truths)
.filter(
truth ->
truth.symbol() instanceof MethodSymbol
&& truth.symbol().getSimpleName().contentEquals("isEmpty"))
.flatMap(truth -> truth.receiver().stream())
.anyMatch(constantExpression::equals)
|| getMethodInvocations(falsehoods)
.filter(
truth ->
truth.symbol() instanceof MethodSymbol
&& truth.symbol().getSimpleName().contentEquals("isPresent"))
.flatMap(truth -> truth.receiver().stream())
.anyMatch(constantExpression::equals)) {
state.reportMatch(describeMatch(tree));
}
}
private Stream<PureMethodInvocation> getMethodInvocations(Multiset<ConstantExpression> truths) {
return truths.stream()
.flatMap(ce -> ce instanceof PureMethodInvocation pmi ? Stream.of(pmi) : Stream.empty());
}
}
}
| IfScanner |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/bean/priority/BeanHasPriorityTest.java | {
"start": 2450,
"end": 2519
} | class ____ {
}
@Singleton
@Priority(0)
static | NoPriority |
java | google__guice | extensions/spring/src/com/google/inject/spring/SpringIntegration.java | {
"start": 2539,
"end": 3621
} | class ____<T> implements Provider<T> {
BeanFactory beanFactory;
boolean singleton;
final Class<T> type;
final String name;
public SpringProvider(Class<T> type, String name) {
this.type = checkNotNull(type, "type");
this.name = checkNotNull(name, "name");
}
static <T> SpringProvider<T> newInstance(Class<T> type, String name) {
return new SpringProvider<T>(type, name);
}
void initialize(BeanFactory beanFactory) {
this.beanFactory = beanFactory;
if (!beanFactory.isTypeMatch(name, type)) {
throw new ClassCastException(
"Spring bean named '" + name + "' does not implement " + type.getName() + ".");
}
singleton = beanFactory.isSingleton(name);
}
@Override
public T get() {
return singleton ? getSingleton() : type.cast(beanFactory.getBean(name));
}
volatile T instance;
private T getSingleton() {
if (instance == null) {
instance = type.cast(beanFactory.getBean(name));
}
return instance;
}
}
static | SpringProvider |
java | google__auto | common/src/test/java/com/google/auto/common/OverridesTest.java | {
"start": 8198,
"end": 8299
} | class ____ extends RawParent {
@Override
void frob(List x) {}
}
static | RawChildOfRaw |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/PeriodicStatsAccumulator.java | {
"start": 3705,
"end": 8040
} | class ____ three instance variables, {@code oldProgress} and
* {@code oldValue} and {@code currentAccumulation}.
*
* {@code extendInternal} can count on three things:
*
* 1: The first time it's called in a particular instance, both
* oldXXX's will be zero.
*
* 2: oldXXX for a later call is the value of newXXX of the
* previous call. This ensures continuity in accumulation from
* one call to the next.
*
* 3: {@code currentAccumulation} is owned by
* {@code initializeInterval} and {@code extendInternal}.
*/
protected abstract void extendInternal(double newProgress, int newValue);
// What has to be done when you open a new interval
/**
* initializes the state variables to be ready for a new interval
*/
protected void initializeInterval() {
state.currentAccumulation = 0.0D;
}
// called for each new reading
/**
* This method calls {@code extendInternal} at least once. It
* divides the current progress interval [from the last call's
* {@code newProgress} to this call's {@code newProgress} ]
* into one or more subintervals by splitting at any point which
* is an interval boundary if there are any such points. It
* then calls {@code extendInternal} for each subinterval, or the
* whole interval if there are no splitting points.
*
* <p>For example, if the value was {@code 300} last time with
* {@code 0.3} progress, and count is {@code 5}, and you get a
* new reading with the variable at {@code 700} and progress at
* {@code 0.7}, you get three calls to {@code extendInternal}:
* one extending from progress {@code 0.3} to {@code 0.4} [the
* next boundary] with a value of {@code 400}, the next one
* through {@code 0.6} with a value of {@code 600}, and finally
* one at {@code 700} with a progress of {@code 0.7} .
*
* @param newProgress the endpoint of the progress range this new
* reading covers
* @param newValue the value of the reading at {@code newProgress}
*/
protected void extend(double newProgress, int newValue) {
if (state == null || newProgress < state.oldProgress) {
return;
}
// This correctness of this code depends on 100% * count = count.
int oldIndex = (int)(state.oldProgress * count);
int newIndex = (int)(newProgress * count);
int originalOldValue = state.oldValue;
double fullValueDistance = (double)newValue - state.oldValue;
double fullProgressDistance = newProgress - state.oldProgress;
double originalOldProgress = state.oldProgress;
// In this loop we detect each subinterval boundary within the
// range from the old progress to the new one. Then we
// interpolate the value from the old value to the new one to
// infer what its value might have been at each such boundary.
// Lastly we make the necessary calls to extendInternal to fold
// in the data for each trapazoid where no such trapazoid
// crosses a boundary.
for (int closee = oldIndex; closee < newIndex; ++closee) {
double interpolationProgress = (double)(closee + 1) / count;
// In floats, x * y / y might not equal y.
interpolationProgress = Math.min(interpolationProgress, newProgress);
double progressLength = (interpolationProgress - originalOldProgress);
double interpolationProportion = progressLength / fullProgressDistance;
double interpolationValueDistance
= fullValueDistance * interpolationProportion;
// estimates the value at the next [interpolated] subsegment boundary
int interpolationValue
= (int)interpolationValueDistance + originalOldValue;
extendInternal(interpolationProgress, interpolationValue);
advanceState(interpolationProgress, interpolationValue);
values[closee] = (int)state.currentAccumulation;
initializeInterval();
}
extendInternal(newProgress, newValue);
advanceState(newProgress, newValue);
if (newIndex == count) {
state = null;
}
}
protected void advanceState(double newProgress, int newValue) {
state.oldValue = newValue;
state.oldProgress = newProgress;
}
int getCount() {
return count;
}
int get(int index) {
return values[index];
}
}
| has |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/Tree.java | {
"start": 1480,
"end": 11530
} | class ____ implements LenientlyParsedTrainedModel, StrictlyParsedTrainedModel, Accountable {
private static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(Tree.class);
// TODO should we have regression/classification sub-classes that accept the builder?
public static final ParseField NAME = new ParseField("tree");
public static final ParseField FEATURE_NAMES = new ParseField("feature_names");
public static final ParseField TREE_STRUCTURE = new ParseField("tree_structure");
public static final ParseField CLASSIFICATION_LABELS = new ParseField("classification_labels");
private static final ObjectParser<Tree.Builder, Void> LENIENT_PARSER = createParser(true);
private static final ObjectParser<Tree.Builder, Void> STRICT_PARSER = createParser(false);
private static ObjectParser<Tree.Builder, Void> createParser(boolean lenient) {
ObjectParser<Tree.Builder, Void> parser = new ObjectParser<>(NAME.getPreferredName(), lenient, Tree.Builder::new);
parser.declareStringArray(Tree.Builder::setFeatureNames, FEATURE_NAMES);
parser.declareObjectArray(Tree.Builder::setNodes, (p, c) -> TreeNode.fromXContent(p, lenient), TREE_STRUCTURE);
parser.declareString(Tree.Builder::setTargetType, TargetType.TARGET_TYPE);
parser.declareStringArray(Tree.Builder::setClassificationLabels, CLASSIFICATION_LABELS);
return parser;
}
public static Tree fromXContentStrict(XContentParser parser) {
return STRICT_PARSER.apply(parser, null).build();
}
public static Tree fromXContentLenient(XContentParser parser) {
return LENIENT_PARSER.apply(parser, null).build();
}
private final List<String> featureNames;
private final List<TreeNode> nodes;
private final TargetType targetType;
private final List<String> classificationLabels;
Tree(List<String> featureNames, List<TreeNode> nodes, TargetType targetType, List<String> classificationLabels) {
this.featureNames = Collections.unmodifiableList(ExceptionsHelper.requireNonNull(featureNames, FEATURE_NAMES));
if (ExceptionsHelper.requireNonNull(nodes, TREE_STRUCTURE).size() == 0) {
throw new IllegalArgumentException("[tree_structure] must not be empty");
}
this.nodes = Collections.unmodifiableList(nodes);
this.targetType = ExceptionsHelper.requireNonNull(targetType, TargetType.TARGET_TYPE);
this.classificationLabels = classificationLabels == null ? null : Collections.unmodifiableList(classificationLabels);
}
public Tree(StreamInput in) throws IOException {
this.featureNames = in.readCollectionAsImmutableList(StreamInput::readString);
this.nodes = in.readCollectionAsImmutableList(TreeNode::new);
this.targetType = TargetType.fromStream(in);
if (in.readBoolean()) {
this.classificationLabels = in.readCollectionAsImmutableList(StreamInput::readString);
} else {
this.classificationLabels = null;
}
}
@Override
public String getName() {
return NAME.getPreferredName();
}
@Override
public TargetType targetType() {
return targetType;
}
@Override
public String getWriteableName() {
return NAME.getPreferredName();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeStringCollection(featureNames);
out.writeCollection(nodes);
targetType.writeTo(out);
out.writeBoolean(classificationLabels != null);
if (classificationLabels != null) {
out.writeStringCollection(classificationLabels);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(FEATURE_NAMES.getPreferredName(), featureNames);
builder.field(TREE_STRUCTURE.getPreferredName(), nodes);
builder.field(TargetType.TARGET_TYPE.getPreferredName(), targetType.toString());
if (classificationLabels != null) {
builder.field(CLASSIFICATION_LABELS.getPreferredName(), classificationLabels);
}
builder.endObject();
return builder;
}
@Override
public String toString() {
return Strings.toString(this);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Tree that = (Tree) o;
return Objects.equals(featureNames, that.featureNames)
&& Objects.equals(nodes, that.nodes)
&& Objects.equals(targetType, that.targetType)
&& Objects.equals(classificationLabels, that.classificationLabels);
}
@Override
public int hashCode() {
return Objects.hash(featureNames, nodes, targetType, classificationLabels);
}
public static Builder builder() {
return new Builder();
}
@Override
public void validate() {
int maxFeatureIndex = maxFeatureIndex();
if (maxFeatureIndex >= featureNames.size()) {
throw ExceptionsHelper.badRequestException(
"feature index [{}] is out of bounds for the [{}] array",
maxFeatureIndex,
FEATURE_NAMES.getPreferredName()
);
}
if (nodes.size() > 1) {
if (featureNames.isEmpty()) {
throw ExceptionsHelper.badRequestException(
"[{}] is empty and the tree has > 1 nodes; num nodes [{}]. " + "The model Must have features if tree is not a stump",
FEATURE_NAMES.getPreferredName(),
nodes.size()
);
}
}
checkTargetType();
detectMissingNodes();
detectCycle();
verifyLeafNodeUniformity();
}
@Override
public long estimatedNumOperations() {
// Grabbing the features from the doc + the depth of the tree
return (long) Math.ceil(Math.log(nodes.size())) + featureNames.size();
}
/**
* The highest index of a feature used any of the nodes.
* If no nodes use a feature return -1. This can only happen
* if the tree contains a single leaf node.
*
* @return The max or -1
*/
int maxFeatureIndex() {
int maxFeatureIndex = -1;
for (TreeNode node : nodes) {
maxFeatureIndex = Math.max(maxFeatureIndex, node.getSplitFeature());
}
return maxFeatureIndex;
}
private void checkTargetType() {
if (this.classificationLabels != null && this.targetType != TargetType.CLASSIFICATION) {
throw ExceptionsHelper.badRequestException("[target_type] should be [classification] if [classification_labels] are provided");
}
if (this.targetType != TargetType.CLASSIFICATION && this.nodes.stream().anyMatch(n -> n.getLeafValue().length > 1)) {
throw ExceptionsHelper.badRequestException("[target_type] should be [classification] if leaf nodes have multiple values");
}
}
private void detectCycle() {
Set<Integer> visited = Sets.newHashSetWithExpectedSize(nodes.size());
Queue<Integer> toVisit = new ArrayDeque<>(nodes.size());
toVisit.add(0);
while (toVisit.isEmpty() == false) {
Integer nodeIdx = toVisit.remove();
if (visited.contains(nodeIdx)) {
throw ExceptionsHelper.badRequestException("[tree] contains cycle at node {}", nodeIdx);
}
visited.add(nodeIdx);
TreeNode treeNode = nodes.get(nodeIdx);
if (treeNode.getLeftChild() >= 0) {
toVisit.add(treeNode.getLeftChild());
}
if (treeNode.getRightChild() >= 0) {
toVisit.add(treeNode.getRightChild());
}
}
}
private void detectMissingNodes() {
List<Integer> missingNodes = new ArrayList<>();
for (int i = 0; i < nodes.size(); i++) {
TreeNode currentNode = nodes.get(i);
if (currentNode == null) {
continue;
}
if (nodeMissing(currentNode.getLeftChild(), nodes)) {
missingNodes.add(currentNode.getLeftChild());
}
if (nodeMissing(currentNode.getRightChild(), nodes)) {
missingNodes.add(currentNode.getRightChild());
}
}
if (missingNodes.isEmpty() == false) {
throw ExceptionsHelper.badRequestException("[tree] contains missing nodes {}", missingNodes);
}
}
private void verifyLeafNodeUniformity() {
Integer leafValueLengths = null;
for (TreeNode node : nodes) {
if (node.isLeaf()) {
if (leafValueLengths == null) {
leafValueLengths = node.getLeafValue().length;
} else if (leafValueLengths != node.getLeafValue().length) {
throw ExceptionsHelper.badRequestException("[tree.tree_structure] all leaf nodes must have the same number of values");
}
}
}
}
private static boolean nodeMissing(int nodeIdx, List<TreeNode> nodes) {
return nodeIdx >= nodes.size();
}
@Override
public long ramBytesUsed() {
long size = SHALLOW_SIZE;
size += RamUsageEstimator.sizeOfCollection(classificationLabels);
size += RamUsageEstimator.sizeOfCollection(featureNames);
size += RamUsageEstimator.sizeOfCollection(nodes);
return size;
}
@Override
public Collection<Accountable> getChildResources() {
List<Accountable> accountables = new ArrayList<>(nodes.size());
for (TreeNode node : nodes) {
accountables.add(Accountables.namedAccountable("tree_node_" + node.getNodeIndex(), node));
}
return Collections.unmodifiableCollection(accountables);
}
public static final | Tree |
java | micronaut-projects__micronaut-core | core-processor/src/main/java/io/micronaut/expressions/parser/ast/operator/binary/MatchesOperator.java | {
"start": 1726,
"end": 3076
} | class ____ extends ExpressionNode {
private static final java.lang.reflect.Method MATCHES = ReflectionUtils.getRequiredMethod(
String.class, "matches", String.class
);
private final ExpressionNode operand;
private final StringLiteral pattern;
public MatchesOperator(ExpressionNode operand, StringLiteral pattern) {
this.operand = operand;
this.pattern = pattern;
}
@Override
public ExpressionDef generateExpression(ExpressionCompilationContext ctx) {
return operand.compile(ctx)
.invoke(MATCHES, pattern.compile(ctx));
}
@Override
protected ClassElement doResolveClassElement(ExpressionVisitorContext ctx) {
return PrimitiveElement.BOOLEAN;
}
@Override
protected TypeDef doResolveType(@NonNull ExpressionVisitorContext ctx) {
if (!operand.resolveType(ctx).equals(STRING)) {
throw new ExpressionCompilationException(
"Operator 'matches' can only be applied to String operand");
}
String patternValue = pattern.getValue();
try {
Pattern.compile(patternValue);
} catch (PatternSyntaxException ex) {
throw new ExpressionCompilationException("Invalid RegEx pattern provided: " + patternValue);
}
return BOOLEAN;
}
}
| MatchesOperator |
java | apache__flink | flink-python/src/main/java/org/apache/flink/table/runtime/arrow/writers/ArrayWriter.java | {
"start": 2902,
"end": 3471
} | class ____ extends ArrayWriter<RowData> {
private ArrayWriterForRow(
ListVector listVector, ArrowFieldWriter<ArrayData> elementWriter) {
super(listVector, elementWriter);
}
@Override
boolean isNullAt(RowData in, int ordinal) {
return in.isNullAt(ordinal);
}
@Override
ArrayData readArray(RowData in, int ordinal) {
return in.getArray(ordinal);
}
}
/** {@link ArrayWriter} for {@link ArrayData} input. */
public static final | ArrayWriterForRow |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java | {
"start": 155244,
"end": 157107
} | interface ____ {
void handleCompletion(
ShardSnapshotUpdateResult shardSnapshotUpdateResult,
List<SnapshotsInProgress.Entry> newlyCompletedEntries,
Set<ProjectRepo> updatedRepositories
);
}
private void handleShardSnapshotUpdateCompletion(
ShardSnapshotUpdateResult shardSnapshotUpdateResult,
List<SnapshotsInProgress.Entry> newlyCompletedEntries,
Set<ProjectRepo> updatedRepositories
) {
// Maybe this state update completed one or more snapshots. If we are not already ending them because of some earlier update, end
// them now.
final var snapshotsInProgress = shardSnapshotUpdateResult.snapshotsInProgress();
for (final var newlyCompletedEntry : newlyCompletedEntries) {
if (endingSnapshots.contains(newlyCompletedEntry.snapshot()) == false) {
endSnapshot(newlyCompletedEntry, shardSnapshotUpdateResult.metadata, null);
}
}
// Likewise this state update may enable some new shard clones on any affected repository, so check them all.
for (final var updatedRepository : updatedRepositories) {
startExecutableClones(snapshotsInProgress, updatedRepository);
}
// Also shard snapshot completions may free up some shards to move to other nodes, so we must trigger a reroute.
if (updatedRepositories.isEmpty() == false) {
rerouteService.reroute("after shards snapshot update", Priority.NORMAL, ActionListener.noop());
}
}
/**
* Task to update the state of a shard snapshot in the cluster state metadata. Created when a data node request reaches the master node
* to change the state of the shard snapshot.
*
* Package private for testing
*/
static final | ShardSnapshotUpdateCompletionHandler |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/mapping/UnidirectionalOneToManyIndexColumnTest.java | {
"start": 3062,
"end": 3217
} | class ____ {
@Id
@GeneratedValue
private int id;
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
}
}
| Child |
java | google__dagger | javatests/dagger/internal/codegen/GenericMethodsTest.java | {
"start": 971,
"end": 1641
} | class ____ {
@Parameters(name = "{0}")
public static ImmutableList<Object[]> parameters() {
return CompilerMode.TEST_PARAMETERS;
}
private final CompilerMode compilerMode;
public GenericMethodsTest(CompilerMode compilerMode) {
this.compilerMode = compilerMode;
}
@Test
public void parameterizedComponentMethods() {
Source component =
CompilerTests.javaSource(
"test.TestComponent",
"package test;",
"",
"import dagger.Component;",
"import dagger.MembersInjector;",
"import java.util.Set;",
"",
"@Component",
" | GenericMethodsTest |
java | apache__dubbo | dubbo-common/src/test/java/org/apache/dubbo/common/compiler/support/JavaCodeTest.java | {
"start": 4083,
"end": 4436
} | class ____" + SUBFIX.getAndIncrement()
+ " extends org.apache.dubbo.common.compiler.support.HelloServiceImpl0 {\n");
code.append(" public String sayHello() { ");
code.append(" return \"Hello world3!\"; ");
code.append(" }");
code.append('}');
return code.toString();
}
}
| HelloServiceImpl |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/operators/hash/MutableHashTable.java | {
"start": 2421,
"end": 4107
} | class ____ in many parts the design presented in "Hash joins and hash
* teams in Microsoft SQL Server", by Goetz Graefe et al. In its current state, the implementation
* lacks features like dynamic role reversal, partition tuning, or histogram guided partitioning.
*
* <p>The layout of the buckets inside a memory segment is as follows:
*
* <pre>
* +----------------------------- Bucket x ----------------------------
* |Partition (1 byte) | Status (1 byte) | element count (2 bytes) |
* | next-bucket-in-chain-pointer (8 bytes) | probedFlags (2 bytes) | reserved (2 bytes) |
* |
* |hashCode 1 (4 bytes) | hashCode 2 (4 bytes) | hashCode 3 (4 bytes) |
* | ... hashCode n-1 (4 bytes) | hashCode n (4 bytes)
* |
* |pointer 1 (8 bytes) | pointer 2 (8 bytes) | pointer 3 (8 bytes) |
* | ... pointer n-1 (8 bytes) | pointer n (8 bytes)
* |
* +---------------------------- Bucket x + 1--------------------------
* |Partition (1 byte) | Status (1 byte) | element count (2 bytes) |
* | next-bucket-in-chain-pointer (8 bytes) | probedFlags (2 bytes) | reserved (2 bytes) |
* |
* |hashCode 1 (4 bytes) | hashCode 2 (4 bytes) | hashCode 3 (4 bytes) |
* | ... hashCode n-1 (4 bytes) | hashCode n (4 bytes)
* |
* |pointer 1 (8 bytes) | pointer 2 (8 bytes) | pointer 3 (8 bytes) |
* | ... pointer n-1 (8 bytes) | pointer n (8 bytes)
* +-------------------------------------------------------------------
* | ...
* |
* </pre>
*
* @param <BT> The type of records from the build side that are stored in the hash table.
* @param <PT> The type of records from the probe side that are stored in the hash table.
*/
@SuppressWarnings("ForLoopReplaceableByForEach")
public | follows |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/IdentityHashMapUsageTest.java | {
"start": 4997,
"end": 5156
} | class ____ {
private final IdentityHashMap<String, Integer> m = new IdentityHashMap<>();
}
""")
.doTest();
}
}
| Test |
java | apache__dubbo | dubbo-common/src/test/java/org/apache/dubbo/common/extension/duplicated/DuplicatedWithoutOverriddenExt.java | {
"start": 922,
"end": 1039
} | interface ____ testing duplicated extension
* see issue: https://github.com/apache/dubbo/issues/3575
*/
@SPI
public | for |
java | elastic__elasticsearch | x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/CertGenUtilsTests.java | {
"start": 1964,
"end": 16467
} | class ____ extends ESTestCase {
/**
* The mapping of key usage names to their corresponding bit index as defined in {@code KeyUsage} class:
*
* <ul>
* <li>digitalSignature (0)</li>
* <li>nonRepudiation (1)</li>
* <li>keyEncipherment (2)</li>
* <li>dataEncipherment (3)</li>
* <li>keyAgreement (4)</li>
* <li>keyCertSign (5)</li>
* <li>cRLSign (6)</li>
* <li>encipherOnly (7)</li>
* <li>decipherOnly (8)</li>
* </ul>
*/
private static final Map<String, Integer> KEY_USAGE_BITS = Map.ofEntries(
Map.entry("digitalSignature", 0),
Map.entry("nonRepudiation", 1),
Map.entry("keyEncipherment", 2),
Map.entry("dataEncipherment", 3),
Map.entry("keyAgreement", 4),
Map.entry("keyCertSign", 5),
Map.entry("cRLSign", 6),
Map.entry("encipherOnly", 7),
Map.entry("decipherOnly", 8)
);
@BeforeClass
public static void muteInFips() {
assumeFalse("Can't run in a FIPS JVM", inFipsJvm());
}
public void testSerialNotRepeated() {
int iterations = scaledRandomIntBetween(10, 100);
List<BigInteger> list = new ArrayList<>(iterations);
for (int i = 0; i < iterations; i++) {
BigInteger serial = CertGenUtils.getSerial();
assertThat(list.contains(serial), is(false));
list.add(serial);
}
}
public void testGenerateKeyPair() throws Exception {
KeyPair keyPair = CertGenUtils.generateKeyPair(randomFrom(1024, 2048));
assertThat(keyPair.getPrivate().getAlgorithm(), is("RSA"));
assertThat(keyPair.getPublic().getAlgorithm(), is("RSA"));
}
public void testSubjectAlternativeNames() throws Exception {
final boolean resolveName = randomBoolean();
InetAddress address = InetAddresses.forString("127.0.0.1");
GeneralNames generalNames = CertGenUtils.getSubjectAlternativeNames(resolveName, Collections.singleton(address));
assertThat(generalNames, notNullValue());
GeneralName[] generalNameArray = generalNames.getNames();
assertThat(generalNameArray, notNullValue());
logger.info("resolve name [{}], address [{}], subject alt names [{}]", resolveName, NetworkAddress.format(address), generalNames);
if (resolveName && isResolvable(address)) {
assertThat(generalNameArray.length, is(2));
int firstType = generalNameArray[0].getTagNo();
if (firstType == GeneralName.iPAddress) {
assertThat(generalNameArray[1].getTagNo(), is(GeneralName.dNSName));
} else if (firstType == GeneralName.dNSName) {
assertThat(generalNameArray[1].getTagNo(), is(GeneralName.iPAddress));
} else {
fail("unknown tag value: " + firstType);
}
} else {
assertThat(generalNameArray.length, is(1));
assertThat(generalNameArray[0].getTagNo(), is(GeneralName.iPAddress));
}
}
@SuppressForbidden(reason = "need to use getHostName to resolve DNS name and getHostAddress to ensure we resolved the name")
private boolean isResolvable(InetAddress inetAddress) {
String hostname = inetAddress.getHostName();
return hostname.equals(inetAddress.getHostAddress()) == false;
}
public void testIssuerCertSubjectDN() throws Exception {
final ZonedDateTime notBefore = ZonedDateTime.now(ZoneOffset.UTC);
final ZonedDateTime notAfter = ZonedDateTime.parse("2099-12-31T23:23:59.999999+00:00");
// root CA
final X500Principal rootCaPrincipal = new X500Principal("DC=example.com");
final KeyPair rootCaKeyPair = CertGenUtils.generateKeyPair(2048);
final List<String> rootCaKeyUsages = List.of("keyCertSign", "cRLSign");
final X509Certificate rootCaCert = CertGenUtils.generateSignedCertificate(
rootCaPrincipal,
null,
rootCaKeyPair,
null,
rootCaKeyPair.getPrivate(),
true,
notBefore,
notAfter,
null,
buildKeyUsage(rootCaKeyUsages),
Set.of()
);
// sub CA
final X500Principal subCaPrincipal = new X500Principal("DC=Sub CA,DC=example.com");
final KeyPair subCaKeyPair = CertGenUtils.generateKeyPair(2048);
final List<String> subCaKeyUsage = List.of("digitalSignature", "keyCertSign", "cRLSign");
final X509Certificate subCaCert = CertGenUtils.generateSignedCertificate(
subCaPrincipal,
null,
subCaKeyPair,
rootCaCert,
rootCaKeyPair.getPrivate(),
true,
notBefore,
notAfter,
null,
buildKeyUsage(subCaKeyUsage),
Set.of()
);
// end entity
final X500Principal endEntityPrincipal = new X500Principal("CN=TLS Client\\+Server,DC=Sub CA,DC=example.com");
final KeyPair endEntityKeyPair = CertGenUtils.generateKeyPair(2048);
final List<String> endEntityKeyUsage = randomBoolean() ? null : List.of("digitalSignature", "keyEncipherment");
final X509Certificate endEntityCert = CertGenUtils.generateSignedCertificate(
endEntityPrincipal,
null,
endEntityKeyPair,
subCaCert,
subCaKeyPair.getPrivate(),
true,
notBefore,
notAfter,
null,
buildKeyUsage(endEntityKeyUsage),
Set.of(new ExtendedKeyUsage(KeyPurposeId.anyExtendedKeyUsage))
);
final X509Certificate[] certChain = new X509Certificate[] { endEntityCert, subCaCert, rootCaCert };
// verify generateSignedCertificate performed DN chaining correctly
assertThat(endEntityCert.getIssuerX500Principal(), equalTo(subCaCert.getSubjectX500Principal()));
assertThat(subCaCert.getIssuerX500Principal(), equalTo(rootCaCert.getSubjectX500Principal()));
assertThat(rootCaCert.getIssuerX500Principal(), equalTo(rootCaCert.getSubjectX500Principal()));
// verify custom extended key usage
assertThat(endEntityCert.getExtendedKeyUsage(), equalTo(List.of(KeyPurposeId.anyExtendedKeyUsage.toASN1Primitive().toString())));
// verify cert chaining based on PKIX rules (ex: SubjectDNs/IssuerDNs, SKIs/AKIs, BC, KU, EKU, etc)
final KeyStore trustStore = KeyStore.getInstance("PKCS12", "SunJSSE"); // EX: SunJSSE, BC, BC-FIPS
trustStore.load(null, null);
trustStore.setCertificateEntry("trustAnchor", rootCaCert); // anchor: any part of the chain, or issuer of last entry in chain
validateEndEntityTlsChain(trustStore, certChain, true, true);
// verify custom key usages
assertExpectedKeyUsage(rootCaCert, rootCaKeyUsages);
assertExpectedKeyUsage(subCaCert, subCaKeyUsage);
// when key usage is not specified, the key usage bits should be null
if (endEntityKeyUsage == null) {
assertThat(endEntityCert.getKeyUsage(), is(nullValue()));
assertThat(endEntityCert.getCriticalExtensionOIDs().contains(KeyUsageExtension.KEY_USAGE_OID.toString()), is(false));
} else {
assertExpectedKeyUsage(endEntityCert, endEntityKeyUsage);
}
}
public void testBuildKeyUsage() {
// sanity check that lookup maps are containing the same keyUsage entries
assertThat(KEY_USAGE_BITS.keySet(), containsInAnyOrder(KEY_USAGE_MAPPINGS.keySet().toArray()));
// passing null or empty list of keyUsage names should return null
assertThat(buildKeyUsage(null), is(nullValue()));
assertThat(buildKeyUsage(List.of()), is(nullValue()));
// invalid names should throw IAE
var e = expectThrows(IllegalArgumentException.class, () -> buildKeyUsage(List.of(randomAlphanumericOfLength(5))));
assertThat(e.getMessage(), containsString("Unknown keyUsage"));
{
final List<String> keyUsages = randomNonEmptySubsetOf(KEY_USAGE_MAPPINGS.keySet());
final KeyUsage keyUsage = buildKeyUsage(keyUsages);
for (String usageName : keyUsages) {
final Integer usage = KEY_USAGE_MAPPINGS.get(usageName);
assertThat(" mapping for keyUsage [" + usageName + "] is missing", usage, is(notNullValue()));
assertThat("expected keyUsage [" + usageName + "] to be set in [" + keyUsage + "]", keyUsage.hasUsages(usage), is(true));
}
final Set<String> keyUsagesNotSet = KEY_USAGE_MAPPINGS.keySet()
.stream()
.filter(u -> keyUsages.contains(u) == false)
.collect(Collectors.toSet());
for (String usageName : keyUsagesNotSet) {
final Integer usage = KEY_USAGE_MAPPINGS.get(usageName);
assertThat(" mapping for keyUsage [" + usageName + "] is missing", usage, is(notNullValue()));
assertThat(
"expected keyUsage [" + usageName + "] not to be set in [" + keyUsage + "]",
keyUsage.hasUsages(usage),
is(false)
);
}
}
{
// test that duplicates and whitespaces are ignored
KeyUsage keyUsage = buildKeyUsage(
List.of("digitalSignature ", " nonRepudiation", "\tkeyEncipherment", "keyEncipherment\n")
);
assertThat(keyUsage.hasUsages(KEY_USAGE_MAPPINGS.get("digitalSignature")), is(true));
assertThat(keyUsage.hasUsages(KEY_USAGE_MAPPINGS.get("nonRepudiation")), is(true));
assertThat(keyUsage.hasUsages(KEY_USAGE_MAPPINGS.get("digitalSignature")), is(true));
assertThat(keyUsage.hasUsages(KEY_USAGE_MAPPINGS.get("keyEncipherment")), is(true));
}
}
public void testIsValidKeyUsage() {
assertThat(isValidKeyUsage(randomFrom(KEY_USAGE_MAPPINGS.keySet())), is(true));
assertThat(isValidKeyUsage(randomAlphanumericOfLength(5)), is(false));
// keyUsage names are case-sensitive
assertThat(isValidKeyUsage("DigitalSignature"), is(false));
// white-spaces are ignored
assertThat(isValidKeyUsage("keyAgreement "), is(true));
assertThat(isValidKeyUsage("keyCertSign\n"), is(true));
assertThat(isValidKeyUsage("\tcRLSign "), is(true));
}
public static void assertExpectedKeyUsage(X509Certificate certificate, List<String> expectedKeyUsage) {
final boolean[] keyUsage = certificate.getKeyUsage();
assertThat("Expected " + KEY_USAGE_BITS.size() + " bits for key usage", keyUsage.length, equalTo(KEY_USAGE_BITS.size()));
final Set<Integer> expectedBitsToBeSet = expectedKeyUsage.stream().map(KEY_USAGE_BITS::get).collect(Collectors.toSet());
for (int i = 0; i < keyUsage.length; i++) {
if (expectedBitsToBeSet.contains(i)) {
assertThat("keyUsage bit [" + i + "] expected to be set: " + expectedKeyUsage, keyUsage[i], equalTo(true));
} else {
assertThat("keyUsage bit [" + i + "] not expected to be set: " + expectedKeyUsage, keyUsage[i], equalTo(false));
}
}
// key usage must be marked as critical
assertThat(
"keyUsage extension should be marked as critical",
certificate.getCriticalExtensionOIDs().contains(KeyUsageExtension.KEY_USAGE_OID.toString()),
is(true)
);
}
/**
* Perform PKIX TLS certificate chain validation. This validates trust and chain correctness, not server hostname verification.
* Wrap TrustStore with TrustManager[], and select first element that implements the X509ExtendedTrustManager interface.
* Use it to call checkClientTrusted() and/or checkServerTrusted().
*
* @param trustStore TrustStore must contain at least one trust anchor (ex: root/sub/cross/link CA certs, self-signed end entity)
* @param certChain Cert chain to be validated using trust anchor(s) in TrustStore. Partial chains are supported.
* @param doTlsClientCheck Flag to enable TLS client end entity validation checking.
* @param doTlsServerCheck Flag to enable TLS server end entity validation checking.
* @throws Exception X509ExtendedTrustManager initialization failed, or cert chain validation failed.
*/
public static void validateEndEntityTlsChain(
final KeyStore trustStore,
final X509Certificate[] certChain,
final boolean doTlsClientCheck,
final boolean doTlsServerCheck
) throws Exception {
final TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance("PKIX", "SunJSSE");
trustManagerFactory.init(trustStore);
final TrustManager[] trustManagers = trustManagerFactory.getTrustManagers(); // Usually only 1 is returned
X509ExtendedTrustManager x509ExtendedTrustManager = null;
for (final TrustManager trustManager : trustManagers) {
if (trustManager instanceof X509ExtendedTrustManager) {
x509ExtendedTrustManager = (X509ExtendedTrustManager) trustManagers[0];
break; // use the first TrustManager that implements the javax.net.ssl.X509ExtendedTrustManager interface
}
}
if (null == x509ExtendedTrustManager) {
throw new UnsupportedOperationException("Expected at least one javax.net.ssl.X509ExtendedTrustManager");
}
// TLS authType is a substring of cipher suite. It controls what OpenJDK EndEntityChecker.java checks in KU/EKU/etc extensions.
// EKU=null|clientAuth, KU=digitalSignature, authType=DHE_DSS/DHE_RSA/ECDHE_ECDSA/ECDHE_RSA/RSA_EXPORT/UNKNOWN
if (doTlsClientCheck) {
x509ExtendedTrustManager.checkClientTrusted(certChain, "ECDHE_RSA");
}
// EKU=null|serverAuth, KU=digitalSignature, authType=DHE_DSS/DHE_RSA/ECDHE_ECDSA/ECDHE_RSA/RSA_EXPORT/UNKNOWN
// EKU=null|serverAuth, KU=keyEncipherment, authType=RSA
// EKU=null|serverAuth, KU=keyAgreement, authType=DH_DSS/DH_RSA/ECDH_ECDSA/ECDH_RSA
if (doTlsServerCheck) {
x509ExtendedTrustManager.checkServerTrusted(certChain, "ECDHE_RSA");
}
}
}
| CertGenUtilsTests |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/util/ContextDataProviderTest.java | {
"start": 2653,
"end": 2964
} | class ____ implements ContextDataProvider {
@Override
public Map<String, String> supplyContextData() {
final Map<String, String> contextData = new HashMap<>();
contextData.put("testKey", "testValue");
return contextData;
}
}
}
| TestContextDataProvider |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/type/IterationType.java | {
"start": 353,
"end": 5884
} | class ____ extends SimpleType
{
private static final long serialVersionUID = 1L;
protected final JavaType _iteratedType;
protected IterationType(Class<?> cls, TypeBindings bindings,
JavaType superClass, JavaType[] superInts, JavaType iteratedType,
Object valueHandler, Object typeHandler, boolean asStatic)
{
super(cls, bindings, superClass, superInts, Objects.hashCode(iteratedType),
valueHandler, typeHandler, asStatic);
_iteratedType = iteratedType;
}
/**
* Constructor used when upgrading into this type (via {@link #upgradeFrom},
* the usual way for {@link IterationType}s to come into existence.
* Sets up what is considered the "base" iteration type
*/
protected IterationType(TypeBase base, JavaType iteratedType)
{
super(base);
_iteratedType = iteratedType;
}
/**
* Factory method that can be used to "upgrade" a basic type into iteration
* type; usually done via {@link TypeModifier}
*
* @param baseType Resolved non-iteration type (usually {@link SimpleType}) that is being upgraded
* @param iteratedType Iterated type; usually the first and only type parameter, but not necessarily
*/
public static IterationType upgradeFrom(JavaType baseType, JavaType iteratedType) {
Objects.requireNonNull(iteratedType);
// 19-Oct-2015, tatu: Not sure if and how other types could be used as base;
// will cross that bridge if and when need be
if (baseType instanceof TypeBase base) {
return new IterationType(base, iteratedType);
}
throw new IllegalArgumentException("Cannot upgrade from an instance of "+baseType.getClass());
}
public static IterationType construct(Class<?> cls, TypeBindings bindings,
JavaType superClass, JavaType[] superInts, JavaType iteratedType)
{
return new IterationType(cls, bindings, superClass, superInts,
iteratedType, null, null, false);
}
@Override
public JavaType withContentType(JavaType contentType) {
if (_iteratedType == contentType) {
return this;
}
return new IterationType(_class, _bindings, _superClass, _superInterfaces,
contentType, _valueHandler, _typeHandler, _asStatic);
}
@Override
public IterationType withTypeHandler(Object h)
{
if (h == _typeHandler) {
return this;
}
return new IterationType(_class, _bindings, _superClass, _superInterfaces,
_iteratedType, _valueHandler, h, _asStatic);
}
@Override
public IterationType withContentTypeHandler(Object h)
{
if (h == _iteratedType.getTypeHandler()) {
return this;
}
return new IterationType(_class, _bindings, _superClass, _superInterfaces,
_iteratedType.withTypeHandler(h),
_valueHandler, _typeHandler, _asStatic);
}
@Override
public IterationType withValueHandler(Object h) {
if (h == _valueHandler) {
return this;
}
return new IterationType(_class, _bindings,
_superClass, _superInterfaces, _iteratedType,
h, _typeHandler,_asStatic);
}
@Override
public IterationType withContentValueHandler(Object h) {
if (h == _iteratedType.getValueHandler()) {
return this;
}
return new IterationType(_class, _bindings,
_superClass, _superInterfaces, _iteratedType.withValueHandler(h),
_valueHandler, _typeHandler, _asStatic);
}
@Override
public IterationType withStaticTyping() {
if (_asStatic) {
return this;
}
return new IterationType(_class, _bindings, _superClass, _superInterfaces,
_iteratedType.withStaticTyping(),
_valueHandler, _typeHandler, true);
}
@Override
public JavaType refine(Class<?> rawType, TypeBindings bindings,
JavaType superClass, JavaType[] superInterfaces) {
return new IterationType(rawType, _bindings,
superClass, superInterfaces, _iteratedType,
_valueHandler, _typeHandler, _asStatic);
}
@Override
protected String buildCanonicalName()
{
StringBuilder sb = new StringBuilder();
sb.append(_class.getName());
if ((_iteratedType != null) && _hasNTypeParameters(1)) {
sb.append('<');
sb.append(_iteratedType.toCanonical());
sb.append('>');
}
return sb.toString();
}
/*
/**********************************************************************
/* Public API overrides
/**********************************************************************
*/
@Override
public JavaType getContentType() {
return _iteratedType;
}
@Override
public boolean hasContentType() {
return true;
}
@Override
public boolean isIterationType() {
return true;
}
@Override
public StringBuilder getErasedSignature(StringBuilder sb) {
return _classSignature(_class, sb, true);
}
@Override
public StringBuilder getGenericSignature(StringBuilder sb)
{
_classSignature(_class, sb, false);
sb.append('<');
sb = _iteratedType.getGenericSignature(sb);
sb.append(">;");
return sb;
}
}
| IterationType |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CopySignErrorTests.java | {
"start": 797,
"end": 1396
} | class ____ extends ErrorsForCasesWithoutExamplesTestCase {
@Override
protected List<TestCaseSupplier> cases() {
return paramsToSuppliers(CopySignTests.parameters());
}
@Override
protected Expression build(Source source, List<Expression> args) {
return new CopySign(source, args.get(0), args.get(1));
}
@Override
protected Matcher<String> expectedTypeErrorMatcher(List<Set<DataType>> validPerPosition, List<DataType> signature) {
return equalTo(typeErrorMessage(true, validPerPosition, signature, (v, i) -> "numeric"));
}
}
| CopySignErrorTests |
java | apache__flink | flink-datastream-api/src/main/java/org/apache/flink/datastream/api/context/JobInfo.java | {
"start": 997,
"end": 1306
} | interface ____ {
/**
* Get the name of current job.
*
* @return the name of current job
*/
String getJobName();
/** Get the {@link ExecutionMode} of current job. */
ExecutionMode getExecutionMode();
/** Execution mode of this current job. */
@Experimental
| JobInfo |
java | elastic__elasticsearch | x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/profile/RestEnableProfileAction.java | {
"start": 1184,
"end": 2196
} | class ____ extends SecurityBaseRestHandler {
public RestEnableProfileAction(Settings settings, XPackLicenseState licenseState) {
super(settings, licenseState);
}
@Override
public List<Route> routes() {
return List.of(new Route(POST, "/_security/profile/{uid}/_enable"), new Route(PUT, "/_security/profile/{uid}/_enable"));
}
@Override
public String getName() {
return "security_enable_profile_action";
}
@Override
public RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException {
final String uid = request.param("uid");
final WriteRequest.RefreshPolicy refreshPolicy = WriteRequest.RefreshPolicy.parse(request.param("refresh", "wait_for"));
return channel -> client.execute(
SetProfileEnabledAction.INSTANCE,
new SetProfileEnabledRequest(uid, true, refreshPolicy),
new RestToXContentListener<>(channel)
);
}
}
| RestEnableProfileAction |
java | alibaba__nacos | common/src/test/java/com/alibaba/nacos/common/notify/DefaultSharePublisherTest.java | {
"start": 1364,
"end": 4901
} | class ____ {
private static final AtomicLong TEST_SEQUENCE = new AtomicLong();
DefaultSharePublisher defaultSharePublisher;
@Mock
SmartSubscriber smartSubscriber1;
@Mock
SmartSubscriber smartSubscriber2;
@BeforeEach
void setUp() throws Exception {
defaultSharePublisher = new DefaultSharePublisher();
defaultSharePublisher.init(SlowEvent.class, 2);
}
@AfterEach
void tearDown() throws Exception {
defaultSharePublisher.shutdown();
}
@Test
void testRemoveSubscribers() {
defaultSharePublisher.addSubscriber(smartSubscriber1, MockSlowEvent1.class);
defaultSharePublisher.addSubscriber(smartSubscriber1, MockSlowEvent2.class);
defaultSharePublisher.addSubscriber(smartSubscriber2, MockSlowEvent2.class);
assertEquals(2, defaultSharePublisher.getSubscribers().size());
defaultSharePublisher.removeSubscriber(smartSubscriber1, MockSlowEvent1.class);
defaultSharePublisher.removeSubscriber(smartSubscriber1, MockSlowEvent2.class);
defaultSharePublisher.removeSubscriber(smartSubscriber2, MockSlowEvent2.class);
assertEquals(0, defaultSharePublisher.getSubscribers().size());
}
@Test
void testReceiveEventWithoutSubscriber() {
defaultSharePublisher.addSubscriber(smartSubscriber1, MockSlowEvent1.class);
defaultSharePublisher.addSubscriber(smartSubscriber2, MockSlowEvent2.class);
defaultSharePublisher.receiveEvent(new SlowEvent() {
private static final long serialVersionUID = 5996336354563933789L;
@Override
public long sequence() {
return super.sequence();
}
});
verify(smartSubscriber1, never()).onEvent(any(SlowEvent.class));
verify(smartSubscriber2, never()).onEvent(any(SlowEvent.class));
}
@Test
void testReceiveEventWithSubscriber() {
defaultSharePublisher.addSubscriber(smartSubscriber1, MockSlowEvent1.class);
defaultSharePublisher.addSubscriber(smartSubscriber2, MockSlowEvent2.class);
defaultSharePublisher.receiveEvent(new MockSlowEvent1());
verify(smartSubscriber1).onEvent(any(MockSlowEvent1.class));
verify(smartSubscriber2, never()).onEvent(any(MockSlowEvent1.class));
defaultSharePublisher.receiveEvent(new MockSlowEvent2());
verify(smartSubscriber1, never()).onEvent(any(MockSlowEvent2.class));
verify(smartSubscriber2).onEvent(any(MockSlowEvent2.class));
}
@Test
void testIgnoreExpiredEvent() throws InterruptedException {
MockSlowEvent1 mockSlowEvent1 = new MockSlowEvent1();
MockSlowEvent2 mockSlowEvent2 = new MockSlowEvent2();
defaultSharePublisher.addSubscriber(smartSubscriber1, MockSlowEvent1.class);
defaultSharePublisher.addSubscriber(smartSubscriber2, MockSlowEvent2.class);
defaultSharePublisher.publish(mockSlowEvent1);
defaultSharePublisher.publish(mockSlowEvent2);
TimeUnit.MILLISECONDS.sleep(1500);
verify(smartSubscriber1).onEvent(mockSlowEvent1);
verify(smartSubscriber2).onEvent(mockSlowEvent2);
reset(smartSubscriber1);
when(smartSubscriber1.ignoreExpireEvent()).thenReturn(true);
defaultSharePublisher.publish(mockSlowEvent1);
TimeUnit.MILLISECONDS.sleep(100);
verify(smartSubscriber1, never()).onEvent(mockSlowEvent1);
}
private static | DefaultSharePublisherTest |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/exc/TestExceptionsDuringWriting.java | {
"start": 1095,
"end": 4116
} | class ____
extends ValueSerializer<Bean>
{
@Override
public void serialize(Bean value, JsonGenerator jgen, SerializationContext provider)
{
throw new IllegalArgumentException("test string");
}
}
/*
/**********************************************************
/* Tests
/**********************************************************
*/
/**
* Unit test that verifies that by default all exceptions except for
* JacksonExceptions are caught and wrapped.
*/
@Test
public void testCatchAndRethrow()
throws Exception
{
SimpleModule module = new SimpleModule("test-exceptions", Version.unknownVersion());
module.addSerializer(Bean.class, new SerializerWithErrors());
ObjectMapper mapper = jsonMapperBuilder()
.addModule(module)
.build();
try {
StringWriter sw = new StringWriter();
// And just to make things more interesting, let's create a nested data struct...
Bean[] b = { new Bean() };
List<Bean[]> l = new ArrayList<Bean[]>();
l.add(b);
mapper.writeValue(sw, l);
fail("Should have gotten an exception");
} catch (JacksonException e) { // too generic but will do for now
// should contain original message somewhere
verifyException(e, "test string");
Throwable root = e.getCause();
assertNotNull(root);
if (!(root instanceof IllegalArgumentException)) {
fail("Wrapped exception not IAE, but "+root.getClass());
}
}
}
/**
* Unit test for verifying that regular IOExceptions are not wrapped
* but are passed through as is.
*/
@SuppressWarnings("resource")
@Test
public void testExceptionWithSimpleMapper()
throws Exception
{
ObjectMapper mapper = newJsonMapper();
try {
BrokenStringWriter sw = new BrokenStringWriter("TEST");
mapper.writeValue(sw, createLongObject());
fail("Should have gotten an exception");
} catch (JacksonException e) {
verifyException(e, "TEST");
Throwable root = e.getCause();
assertNotNull(root);
if (!(root instanceof IOException)) {
fail("Wrapped exception not IOException, but "+root.getClass());
}
}
}
/*
/**********************************************************
/* Helper methods
/**********************************************************
*/
Object createLongObject()
{
List<Object> leaf = new ArrayList<Object>();
for (int i = 0; i < 256; ++i) {
leaf.add(Integer.valueOf(i));
}
List<Object> root = new ArrayList<Object>(256);
for (int i = 0; i < 256; ++i) {
root.add(leaf);
}
return root;
}
}
| SerializerWithErrors |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/context/properties/bind/JavaBeanBinderTests.java | {
"start": 34188,
"end": 34508
} | class ____ {
private @Nullable String property;
void setProperty(int property) {
this.property = String.valueOf(property);
}
void setProperty(@Nullable String property) {
this.property = property;
}
@Nullable String getProperty() {
return this.property;
}
}
static | PropertyWithOverloadedSetter |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/enumerated/mappedSuperclass/EnumeratedWithMappedSuperclassTest.java | {
"start": 3757,
"end": 4681
} | class ____ extends Entity {
@Column(name = "description_lang1", nullable = false, length = 100)
private String descriptionLang1;
@Column(name = "description_lang2", length = 100)
private String descriptionLang2;
@Column(name = "description_lang3", length = 100)
private String descriptionLang3;
public String getDescriptionLang1() {
return this.descriptionLang1;
}
public void setDescriptionLang1(final String descriptionLang1) {
this.descriptionLang1 = descriptionLang1;
}
public String getDescriptionLang2() {
return this.descriptionLang2;
}
public void setDescriptionLang2(final String descriptionLang2) {
this.descriptionLang2 = descriptionLang2;
}
public String getDescriptionLang3() {
return this.descriptionLang3;
}
public void setDescriptionLang3(final String descriptionLang3) {
this.descriptionLang3 = descriptionLang3;
}
}
public static | DescriptionEntity |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/concurrent/ConstantInitializer.java | {
"start": 1478,
"end": 1975
} | class ____<T> implements ConcurrentInitializer<T> {
/** Constant for the format of the string representation. */
private static final String FMT_TO_STRING = "ConstantInitializer@%d [ object = %s ]";
/** Stores the managed object. */
private final T object;
/**
* Creates a new instance of {@link ConstantInitializer} and initializes it
* with the object to be managed. The {@code get()} method will always
* return the object passed here. This | ConstantInitializer |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/conf/TestQueueCapacityConfigParser.java | {
"start": 2243,
"end": 11206
} | class ____ {
private static final String ALL_RESOURCE_TEMPLATE = "[memory-mb=%s, vcores=%s, yarn.io/gpu=%s]";
private static final String MEMORY_VCORE_TEMPLATE = "[memory-mb=%s, vcores=%s]";
private static final String MEMORY_ABSOLUTE = "12Gi";
private static final float VCORE_ABSOLUTE = 6;
private static final float GPU_ABSOLUTE = 10;
private static final float PERCENTAGE_VALUE = 50f;
private static final float MEMORY_MIXED = 1024;
private static final float WEIGHT_VALUE = 6;
private static final String QUEUE = "root.test";
private static final QueuePath QUEUE_PATH = new QueuePath("root.test");
private static final QueuePath ROOT = new QueuePath(CapacitySchedulerConfiguration.ROOT);
private static final String ABSOLUTE_RESOURCE = String.format(
ALL_RESOURCE_TEMPLATE, MEMORY_ABSOLUTE, VCORE_ABSOLUTE, GPU_ABSOLUTE);
private static final String ABSOLUTE_RESOURCE_MEMORY_VCORE = String.format(
MEMORY_VCORE_TEMPLATE, MEMORY_ABSOLUTE, VCORE_ABSOLUTE);
private static final String MIXED_RESOURCE = String.format(
ALL_RESOURCE_TEMPLATE, MEMORY_MIXED, PERCENTAGE_VALUE + "%", WEIGHT_VALUE + "w");
private static final String RESOURCE_TYPES = GPU_URI;
public static final String NONEXISTINGSUFFIX = "50nonexistingsuffix";
public static final String EMPTY_BRACKET = "[]";
public static final String INVALID_CAPACITY_BRACKET = "[invalid]";
public static final String INVALID_CAPACITY_FORMAT = "[memory-100,vcores-60]";
private final QueueCapacityConfigParser capacityConfigParser
= new QueueCapacityConfigParser();
@Test
public void testPercentageCapacityConfig() {
QueueCapacityVector percentageCapacityVector =
capacityConfigParser.parse(Float.toString(PERCENTAGE_VALUE), QUEUE_PATH);
QueueCapacityVectorEntry memory = percentageCapacityVector.getResource(MEMORY_URI);
QueueCapacityVectorEntry vcore = percentageCapacityVector.getResource(VCORES_URI);
assertEquals(ResourceUnitCapacityType.PERCENTAGE, memory.getVectorResourceType());
assertEquals(PERCENTAGE_VALUE, memory.getResourceValue(), EPSILON);
assertEquals(ResourceUnitCapacityType.PERCENTAGE, vcore.getVectorResourceType());
assertEquals(PERCENTAGE_VALUE, vcore.getResourceValue(), EPSILON);
QueueCapacityVector rootCapacityVector =
capacityConfigParser.parse(Float.toString(PERCENTAGE_VALUE), ROOT);
QueueCapacityVectorEntry memoryRoot = rootCapacityVector.getResource(MEMORY_URI);
QueueCapacityVectorEntry vcoreRoot = rootCapacityVector.getResource(VCORES_URI);
assertEquals(ResourceUnitCapacityType.PERCENTAGE, memoryRoot.getVectorResourceType());
assertEquals(100f, memoryRoot.getResourceValue(), EPSILON);
assertEquals(ResourceUnitCapacityType.PERCENTAGE, vcoreRoot.getVectorResourceType());
assertEquals(100f, vcoreRoot.getResourceValue(), EPSILON);
}
@Test
public void testWeightCapacityConfig() {
QueueCapacityVector weightCapacityVector = capacityConfigParser.parse(WEIGHT_VALUE + "w",
QUEUE_PATH);
QueueCapacityVectorEntry memory = weightCapacityVector.getResource(MEMORY_URI);
QueueCapacityVectorEntry vcore = weightCapacityVector.getResource(VCORES_URI);
assertEquals(ResourceUnitCapacityType.WEIGHT, memory.getVectorResourceType());
assertEquals(WEIGHT_VALUE, memory.getResourceValue(), EPSILON);
assertEquals(ResourceUnitCapacityType.WEIGHT, vcore.getVectorResourceType());
assertEquals(WEIGHT_VALUE, vcore.getResourceValue(), EPSILON);
}
@Test
public void testAbsoluteCapacityVectorConfig() {
CapacitySchedulerConfiguration conf = new CapacitySchedulerConfiguration();
conf.set(getQueuePrefix(QUEUE_PATH) + CapacitySchedulerConfiguration.CAPACITY,
ABSOLUTE_RESOURCE);
conf.set(YarnConfiguration.RESOURCE_TYPES, RESOURCE_TYPES);
ResourceUtils.resetResourceTypes(conf);
QueueCapacityVector absoluteCapacityVector = capacityConfigParser.parse(ABSOLUTE_RESOURCE,
QUEUE_PATH);
assertEquals(ResourceUnitCapacityType.ABSOLUTE,
absoluteCapacityVector.getResource(MEMORY_URI).getVectorResourceType());
assertEquals(12 * GB, absoluteCapacityVector.getResource(MEMORY_URI)
.getResourceValue(), EPSILON);
assertEquals(ResourceUnitCapacityType.ABSOLUTE,
absoluteCapacityVector.getResource(VCORES_URI).getVectorResourceType());
assertEquals(VCORE_ABSOLUTE, absoluteCapacityVector.getResource(VCORES_URI)
.getResourceValue(), EPSILON);
assertEquals(ResourceUnitCapacityType.ABSOLUTE,
absoluteCapacityVector.getResource(GPU_URI).getVectorResourceType());
assertEquals(GPU_ABSOLUTE, absoluteCapacityVector.getResource(GPU_URI)
.getResourceValue(), EPSILON);
QueueCapacityVector withoutGpuVector = capacityConfigParser
.parse(ABSOLUTE_RESOURCE_MEMORY_VCORE, QUEUE_PATH);
assertEquals(3, withoutGpuVector.getResourceCount());
assertEquals(0f, withoutGpuVector.getResource(GPU_URI).getResourceValue(), EPSILON);
}
@Test
public void testMixedCapacityConfig() {
CapacitySchedulerConfiguration conf = new CapacitySchedulerConfiguration();
conf.set(YarnConfiguration.RESOURCE_TYPES, RESOURCE_TYPES);
ResourceUtils.resetResourceTypes(conf);
QueueCapacityVector mixedCapacityVector =
capacityConfigParser.parse(MIXED_RESOURCE, QUEUE_PATH);
assertEquals(ResourceUnitCapacityType.ABSOLUTE,
mixedCapacityVector.getResource(MEMORY_URI).getVectorResourceType());
assertEquals(MEMORY_MIXED, mixedCapacityVector.getResource(MEMORY_URI)
.getResourceValue(), EPSILON);
assertEquals(ResourceUnitCapacityType.PERCENTAGE,
mixedCapacityVector.getResource(VCORES_URI).getVectorResourceType());
assertEquals(PERCENTAGE_VALUE,
mixedCapacityVector.getResource(VCORES_URI).getResourceValue(), EPSILON);
assertEquals(ResourceUnitCapacityType.WEIGHT,
mixedCapacityVector.getResource(GPU_URI).getVectorResourceType());
assertEquals(WEIGHT_VALUE,
mixedCapacityVector.getResource(GPU_URI).getResourceValue(), EPSILON);
// Test undefined capacity type default value
QueueCapacityVector mixedCapacityVectorWithGpuUndefined =
capacityConfigParser.parse(ABSOLUTE_RESOURCE_MEMORY_VCORE, QUEUE_PATH);
assertEquals(ResourceUnitCapacityType.ABSOLUTE,
mixedCapacityVectorWithGpuUndefined.getResource(MEMORY_URI).getVectorResourceType());
assertEquals(0, mixedCapacityVectorWithGpuUndefined.getResource(GPU_URI)
.getResourceValue(), EPSILON);
}
@Test
public void testInvalidCapacityConfigs() {
QueueCapacityVector capacityVectorWithInvalidSuffix =
capacityConfigParser.parse(NONEXISTINGSUFFIX, QUEUE_PATH);
List<QueueCapacityVectorEntry> entriesWithInvalidSuffix =
Lists.newArrayList(capacityVectorWithInvalidSuffix.iterator());
assertEquals(0, entriesWithInvalidSuffix.size());
QueueCapacityVector invalidDelimiterCapacityVector =
capacityConfigParser.parse(INVALID_CAPACITY_FORMAT, QUEUE_PATH);
List<QueueCapacityVectorEntry> invalidDelimiterEntries =
Lists.newArrayList(invalidDelimiterCapacityVector.iterator());
assertEquals(0, invalidDelimiterEntries.size());
QueueCapacityVector invalidCapacityVector =
capacityConfigParser.parse(INVALID_CAPACITY_BRACKET, QUEUE_PATH);
List<QueueCapacityVectorEntry> resources =
Lists.newArrayList(invalidCapacityVector.iterator());
assertEquals(0, resources.size());
QueueCapacityVector emptyBracketCapacityVector =
capacityConfigParser.parse(EMPTY_BRACKET, QUEUE_PATH);
List<QueueCapacityVectorEntry> emptyEntries =
Lists.newArrayList(emptyBracketCapacityVector.iterator());
assertEquals(0, emptyEntries.size());
QueueCapacityVector emptyCapacity =
capacityConfigParser.parse("", QUEUE_PATH);
List<QueueCapacityVectorEntry> emptyResources =
Lists.newArrayList(emptyCapacity.iterator());
assertEquals(emptyResources.size(), 0);
QueueCapacityVector nonSetCapacity =
capacityConfigParser.parse(null, QUEUE_PATH);
List<QueueCapacityVectorEntry> nonSetResources =
Lists.newArrayList(nonSetCapacity.iterator());
assertEquals(nonSetResources.size(), 0);
}
@Test
public void testZeroAbsoluteCapacityConfig() {
QueueCapacityVector weightCapacityVector =
capacityConfigParser.parse(String.format(MEMORY_VCORE_TEMPLATE, 0, 0), QUEUE_PATH);
QueueCapacityVectorEntry memory = weightCapacityVector.getResource(MEMORY_URI);
QueueCapacityVectorEntry vcore = weightCapacityVector.getResource(VCORES_URI);
assertEquals(ResourceUnitCapacityType.ABSOLUTE, memory.getVectorResourceType());
assertEquals(0, memory.getResourceValue(), EPSILON);
assertEquals(ResourceUnitCapacityType.ABSOLUTE, vcore.getVectorResourceType());
assertEquals(0, vcore.getResourceValue(), EPSILON);
}
}
| TestQueueCapacityConfigParser |
java | apache__thrift | lib/java/src/main/java/org/apache/thrift/partial/ThriftMetadata.java | {
"start": 1668,
"end": 1726
} | class ____ mainly used by {@code TDeserializer}.
*/
public | is |
java | apache__avro | lang/java/perf/src/main/java/org/apache/avro/perf/test/basic/ArrayTest.java | {
"start": 3132,
"end": 4377
} | class ____ extends BasicState {
private byte[] testData;
private Decoder decoder;
public TestStateDecode() {
super();
}
/**
* Generate test data.
*
* @throws IOException Could not setup test data
*/
@Setup(Level.Trial)
public void doSetupTrial() throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
Encoder encoder = super.newEncoder(true, baos);
final int items = getBatchSize() / 4;
encoder.writeArrayStart();
encoder.setItemCount(1);
encoder.startItem();
encoder.writeArrayStart();
encoder.setItemCount(items);
for (int i = 0; i < getBatchSize(); i += 4) {
encoder.startItem();
encoder.writeFloat(super.getRandom().nextFloat());
encoder.writeFloat(super.getRandom().nextFloat());
encoder.writeFloat(super.getRandom().nextFloat());
encoder.writeFloat(super.getRandom().nextFloat());
}
encoder.writeArrayEnd();
encoder.writeArrayEnd();
this.testData = baos.toByteArray();
}
@Setup(Level.Invocation)
public void doSetupInvocation() throws Exception {
this.decoder = super.newDecoder(this.testData);
}
}
}
| TestStateDecode |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/derivedidentities/e4/b/FinancialHistory.java | {
"start": 492,
"end": 730
} | class ____ {
@Id
String id; // overriding not allowed ... // default join column name is overridden @MapsId
@Temporal(TemporalType.DATE)
Date lastupdate;
@JoinColumn(name = "FK")
@MapsId
@ManyToOne
Person patient;
}
| FinancialHistory |
java | apache__flink | flink-connectors/flink-connector-files/src/main/java/org/apache/flink/connector/file/table/DeserializationSchemaAdapter.java | {
"start": 2139,
"end": 4116
} | class ____ implements BulkFormat<RowData, FileSourceSplit> {
private static final int BATCH_SIZE = 100;
private final DeserializationSchema<RowData> deserializationSchema;
public DeserializationSchemaAdapter(DeserializationSchema<RowData> deserializationSchema) {
this.deserializationSchema = deserializationSchema;
}
private DeserializationSchema<RowData> createDeserialization() throws IOException {
try {
DeserializationSchema<RowData> deserialization =
InstantiationUtil.clone(deserializationSchema);
deserialization.open(
new DeserializationSchema.InitializationContext() {
@Override
public MetricGroup getMetricGroup() {
return new UnregisteredMetricsGroup();
}
@Override
public UserCodeClassLoader getUserCodeClassLoader() {
return (UserCodeClassLoader)
Thread.currentThread().getContextClassLoader();
}
});
return deserialization;
} catch (Exception e) {
throw new IOException(e);
}
}
@Override
public Reader createReader(Configuration config, FileSourceSplit split) throws IOException {
return new Reader(config, split);
}
@Override
public Reader restoreReader(Configuration config, FileSourceSplit split) throws IOException {
Reader reader = new Reader(config, split);
reader.seek(split.getReaderPosition().get().getRecordsAfterOffset());
return reader;
}
@Override
public boolean isSplittable() {
return true;
}
@Override
public TypeInformation<RowData> getProducedType() {
return deserializationSchema.getProducedType();
}
private | DeserializationSchemaAdapter |
java | apache__kafka | clients/clients-integration-tests/src/test/java/org/apache/kafka/clients/admin/DeleteTopicTest.java | {
"start": 2887,
"end": 19132
} | class ____ {
private static final String DEFAULT_TOPIC = "topic";
private final Map<Integer, List<Integer>> expectedReplicaAssignment = Map.of(0, List.of(0, 1, 2));
@ClusterTest
public void testDeleteTopicWithAllAliveReplicas(ClusterInstance cluster) throws Exception {
try (Admin admin = cluster.admin()) {
admin.createTopics(List.of(new NewTopic(DEFAULT_TOPIC, expectedReplicaAssignment))).all().get();
admin.deleteTopics(List.of(DEFAULT_TOPIC)).all().get();
cluster.waitTopicDeletion(DEFAULT_TOPIC);
}
}
@ClusterTest
public void testResumeDeleteTopicWithRecoveredFollower(ClusterInstance cluster) throws Exception {
try (Admin admin = cluster.admin()) {
admin.createTopics(List.of(new NewTopic(DEFAULT_TOPIC, expectedReplicaAssignment))).all().get();
TopicPartition topicPartition = new TopicPartition(DEFAULT_TOPIC, 0);
int leaderId = waitUtilLeaderIsKnown(cluster.brokers(), topicPartition);
KafkaBroker follower = findFollower(cluster.brokers().values(), leaderId);
// shutdown one follower replica
follower.shutdown();
admin.deleteTopics(List.of(DEFAULT_TOPIC)).all().get();
TestUtils.waitForCondition(() -> cluster.brokers().values()
.stream()
.filter(broker -> broker.config().brokerId() != follower.config().brokerId())
.allMatch(b -> b.logManager().getLog(topicPartition, false).isEmpty()),
"Online replicas have not deleted log.");
follower.startup();
cluster.waitTopicDeletion(DEFAULT_TOPIC);
}
}
@ClusterTest(brokers = 4)
public void testPartitionReassignmentDuringDeleteTopic(ClusterInstance cluster) throws Exception {
try (Admin admin = cluster.admin()) {
admin.createTopics(List.of(new NewTopic(DEFAULT_TOPIC, expectedReplicaAssignment))).all().get();
TopicPartition topicPartition = new TopicPartition(DEFAULT_TOPIC, 0);
Map<Integer, KafkaBroker> servers = findPartitionHostingBrokers(cluster.brokers());
int leaderId = waitUtilLeaderIsKnown(cluster.brokers(), topicPartition);
KafkaBroker follower = findFollower(servers.values(), leaderId);
follower.shutdown();
admin.deleteTopics(List.of(DEFAULT_TOPIC)).all().get();
Properties properties = new Properties();
properties.setProperty(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.bootstrapServers());
try (Admin otherAdmin = Admin.create(properties)) {
waitUtilTopicGone(otherAdmin);
assertThrows(ExecutionException.class, () -> otherAdmin.alterPartitionReassignments(
Map.of(topicPartition, Optional.of(new NewPartitionReassignment(List.of(1, 2, 3))))
).all().get());
}
follower.startup();
cluster.waitTopicDeletion(DEFAULT_TOPIC);
}
}
@ClusterTest(brokers = 4)
public void testIncreasePartitionCountDuringDeleteTopic(ClusterInstance cluster) throws Exception {
try (Admin admin = cluster.admin()) {
admin.createTopics(List.of(new NewTopic(DEFAULT_TOPIC, expectedReplicaAssignment))).all().get();
TopicPartition topicPartition = new TopicPartition(DEFAULT_TOPIC, 0);
Map<Integer, KafkaBroker> partitionHostingBrokers = findPartitionHostingBrokers(cluster.brokers());
waitForReplicaCreated(partitionHostingBrokers, topicPartition, "Replicas for topic test not created.");
int leaderId = waitUtilLeaderIsKnown(partitionHostingBrokers, topicPartition);
KafkaBroker follower = findFollower(partitionHostingBrokers.values(), leaderId);
// shutdown a broker to make sure the following topic deletion will be suspended
follower.shutdown();
admin.deleteTopics(List.of(DEFAULT_TOPIC)).all().get();
// increase the partition count for topic
Properties properties = new Properties();
properties.setProperty(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.bootstrapServers());
try (Admin otherAdmin = Admin.create(properties)) {
otherAdmin.createPartitions(Map.of(DEFAULT_TOPIC, NewPartitions.increaseTo(2))).all().get();
} catch (ExecutionException ignored) {
// do nothing
}
follower.startup();
cluster.waitTopicDeletion(DEFAULT_TOPIC);
}
}
@ClusterTest
public void testDeleteTopicDuringAddPartition(ClusterInstance cluster) throws Exception {
try (Admin admin = cluster.admin()) {
admin.createTopics(List.of(new NewTopic(DEFAULT_TOPIC, expectedReplicaAssignment))).all().get();
int leaderId = waitUtilLeaderIsKnown(cluster.brokers(), new TopicPartition(DEFAULT_TOPIC, 0));
TopicPartition newTopicPartition = new TopicPartition(DEFAULT_TOPIC, 1);
KafkaBroker follower = findFollower(cluster.brokers().values(), leaderId);
follower.shutdown();
// wait until the broker is in shutting down state
int followerBrokerId = follower.config().brokerId();
TestUtils.waitForCondition(() -> follower.brokerState().equals(BrokerState.SHUTTING_DOWN),
"Follower " + followerBrokerId + " was not shutdown");
Map<String, NewPartitions> newPartitionSet = Map.of(DEFAULT_TOPIC, NewPartitions.increaseTo(3));
admin.createPartitions(newPartitionSet);
cluster.waitTopicCreation(DEFAULT_TOPIC, 3);
admin.deleteTopics(List.of(DEFAULT_TOPIC)).all().get();
follower.startup();
// test if topic deletion is resumed
cluster.waitTopicDeletion(DEFAULT_TOPIC);
waitForReplicaDeleted(cluster.brokers(), newTopicPartition, "Replica logs not for new partition [" + DEFAULT_TOPIC + ",1] not deleted after delete topic is complete.");
}
}
@ClusterTest
public void testAddPartitionDuringDeleteTopic(ClusterInstance cluster) throws Exception {
try (Admin admin = cluster.admin()) {
admin.createTopics(List.of(new NewTopic(DEFAULT_TOPIC, expectedReplicaAssignment))).all().get();
// partitions to be added to the topic later
TopicPartition newTopicPartition = new TopicPartition(DEFAULT_TOPIC, 1);
admin.deleteTopics(List.of(DEFAULT_TOPIC)).all().get();
Map<String, NewPartitions> newPartitionSet = Map.of(DEFAULT_TOPIC, NewPartitions.increaseTo(3));
admin.createPartitions(newPartitionSet);
cluster.waitTopicDeletion(DEFAULT_TOPIC);
waitForReplicaDeleted(cluster.brokers(), newTopicPartition, "Replica logs not deleted after delete topic is complete");
}
}
@ClusterTest
public void testRecreateTopicAfterDeletion(ClusterInstance cluster) throws Exception {
try (Admin admin = cluster.admin()) {
admin.createTopics(List.of(new NewTopic(DEFAULT_TOPIC, expectedReplicaAssignment))).all().get();
TopicPartition topicPartition = new TopicPartition(DEFAULT_TOPIC, 0);
admin.deleteTopics(List.of(DEFAULT_TOPIC)).all().get();
cluster.waitTopicDeletion(DEFAULT_TOPIC);
// re-create topic on same replicas
admin.createTopics(List.of(new NewTopic(DEFAULT_TOPIC, expectedReplicaAssignment))).all().get();
waitForReplicaCreated(cluster.brokers(), topicPartition, "Replicas for topic " + DEFAULT_TOPIC + " not created.");
}
}
@ClusterTest
public void testDeleteNonExistingTopic(ClusterInstance cluster) throws Exception {
try (Admin admin = cluster.admin()) {
admin.createTopics(List.of(new NewTopic(DEFAULT_TOPIC, expectedReplicaAssignment))).all().get();
TopicPartition topicPartition = new TopicPartition(DEFAULT_TOPIC, 0);
String topic = "test2";
TestUtils.waitForCondition(() -> {
try {
admin.deleteTopics(List.of(topic)).all().get();
return false;
} catch (Exception exception) {
return exception.getCause() instanceof UnknownTopicOrPartitionException;
}
}, "Topic test2 should not exist.");
cluster.waitTopicDeletion(topic);
waitForReplicaCreated(cluster.brokers(), topicPartition, "Replicas for topic test not created.");
cluster.waitUntilLeaderIsElectedOrChangedWithAdmin(admin, DEFAULT_TOPIC, 0, 1000);
}
}
@ClusterTest(serverProperties = {
@ClusterConfigProperty(key = "log.cleaner.enable", value = "true"),
@ClusterConfigProperty(key = "log.cleanup.policy", value = "compact"),
@ClusterConfigProperty(key = "log.cleaner.dedupe.buffer.size", value = "1048577")
})
public void testDeleteTopicWithCleaner(ClusterInstance cluster) throws Exception {
try (Admin admin = cluster.admin()) {
admin.createTopics(List.of(new NewTopic(DEFAULT_TOPIC, expectedReplicaAssignment))).all().get();
TopicPartition topicPartition = new TopicPartition(DEFAULT_TOPIC, 0);
// for simplicity, we are validating cleaner offsets on a single broker
KafkaBroker server = cluster.brokers().values().stream().findFirst().orElseThrow();
TestUtils.waitForCondition(() -> server.logManager().getLog(topicPartition, false).isDefined(),
"Replicas for topic test not created.");
UnifiedLog log = server.logManager().getLog(topicPartition, false).get();
writeDups(100, 3, log);
// force roll the segment so that cleaner can work on it
server.logManager().getLog(topicPartition, false).get().roll(Optional.empty());
// wait for cleaner to clean
server.logManager().cleaner().awaitCleaned(topicPartition, 0, 60000);
admin.deleteTopics(List.of(DEFAULT_TOPIC)).all().get();
cluster.waitTopicDeletion(DEFAULT_TOPIC);
}
}
@ClusterTest
public void testDeleteTopicAlreadyMarkedAsDeleted(ClusterInstance cluster) throws Exception {
try (Admin admin = cluster.admin()) {
admin.createTopics(List.of(new NewTopic(DEFAULT_TOPIC, expectedReplicaAssignment))).all().get();
admin.deleteTopics(List.of(DEFAULT_TOPIC)).all().get();
TestUtils.waitForCondition(() -> {
try {
admin.deleteTopics(List.of(DEFAULT_TOPIC)).all().get();
return false;
} catch (Exception exception) {
return exception.getCause() instanceof UnknownTopicOrPartitionException;
}
}, "Topic " + DEFAULT_TOPIC + " should be marked for deletion or already deleted.");
cluster.waitTopicDeletion(DEFAULT_TOPIC);
}
}
@ClusterTest(controllers = 1,
serverProperties = {@ClusterConfigProperty(key = ServerConfigs.DELETE_TOPIC_ENABLE_CONFIG, value = "false")})
public void testDisableDeleteTopic(ClusterInstance cluster) throws Exception {
try (Admin admin = cluster.admin()) {
admin.createTopics(List.of(new NewTopic(DEFAULT_TOPIC, expectedReplicaAssignment))).all().get();
TopicPartition topicPartition = new TopicPartition(DEFAULT_TOPIC, 0);
TestUtils.waitForCondition(() -> {
try {
admin.deleteTopics(List.of(DEFAULT_TOPIC)).all().get();
return false;
} catch (Exception exception) {
return exception.getCause() instanceof TopicDeletionDisabledException;
}
}, "TopicDeletionDisabledException should be returned when deleting " + DEFAULT_TOPIC);
waitForReplicaCreated(cluster.brokers(), topicPartition, "TopicDeletionDisabledException should be returned when deleting " + DEFAULT_TOPIC);
assertDoesNotThrow(() -> admin.describeTopics(List.of(DEFAULT_TOPIC)).allTopicNames().get());
assertDoesNotThrow(() -> waitUtilLeaderIsKnown(cluster.brokers(), topicPartition));
}
}
private int waitUtilLeaderIsKnown(Map<Integer, KafkaBroker> idToBroker,
TopicPartition topicPartition) throws InterruptedException {
TestUtils.waitForCondition(() -> isLeaderKnown(idToBroker, topicPartition).get().isPresent(), 15000,
"Partition " + topicPartition + " not made yet" + " after 15 seconds");
return isLeaderKnown(idToBroker, topicPartition).get().get();
}
private void waitForReplicaCreated(Map<Integer, KafkaBroker> clusters,
TopicPartition topicPartition,
String failMessage) throws InterruptedException {
TestUtils.waitForCondition(() -> clusters.values().stream().allMatch(broker ->
broker.logManager().getLog(topicPartition, false).isDefined()),
failMessage);
}
private void waitForReplicaDeleted(Map<Integer, KafkaBroker> clusters,
TopicPartition newTopicPartition,
String failMessage) throws InterruptedException {
TestUtils.waitForCondition(() -> clusters.values().stream().allMatch(broker ->
broker.logManager().getLog(newTopicPartition, false).isEmpty()),
failMessage);
}
private Supplier<Optional<Integer>> isLeaderKnown(Map<Integer, KafkaBroker> idToBroker, TopicPartition topicPartition) {
return () -> idToBroker.values()
.stream()
.filter(broker -> broker.replicaManager().onlinePartition(topicPartition)
.exists(tp -> tp.leaderIdIfLocal().isDefined()))
.map(broker -> broker.config().brokerId())
.findFirst();
}
private KafkaBroker findFollower(Collection<KafkaBroker> idToBroker, int leaderId) {
return idToBroker.stream()
.filter(broker -> broker.config().brokerId() != leaderId)
.findFirst()
.orElseGet(() -> fail("Can't find any follower"));
}
private void waitUtilTopicGone(Admin admin) throws Exception {
TestUtils.waitForCondition(() -> {
try {
admin.describeTopics(List.of(DEFAULT_TOPIC)).allTopicNames().get();
return false;
} catch (Exception exception) {
return exception.getCause() instanceof UnknownTopicOrPartitionException;
}
}, "Topic" + DEFAULT_TOPIC + " should be deleted");
}
private Map<Integer, KafkaBroker> findPartitionHostingBrokers(Map<Integer, KafkaBroker> brokers) {
return brokers.entrySet()
.stream()
.filter(broker -> expectedReplicaAssignment.get(0).contains(broker.getValue().config().brokerId()))
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
}
private List<int[]> writeDups(int numKeys, int numDups, UnifiedLog log) {
int counter = 0;
List<int[]> result = new ArrayList<>();
for (int i = 0; i < numDups; i++) {
for (int key = 0; key < numKeys; key++) {
int count = counter;
log.appendAsLeader(
MemoryRecords.withRecords(
Compression.NONE,
new SimpleRecord(
String.valueOf(key).getBytes(),
String.valueOf(counter).getBytes()
)
),
0,
AppendOrigin.CLIENT,
RequestLocal.noCaching(),
VerificationGuard.SENTINEL,
(short) 0
);
counter++;
result.add(new int[] {key, count});
}
}
return result;
}
}
| DeleteTopicTest |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/DefaultScheduledPollConsumerBridgeErrorHandlerTest.java | {
"start": 3682,
"end": 3979
} | class ____ extends ScheduledPollConsumer {
public MyConsumer(Endpoint endpoint, Processor processor) {
super(endpoint, processor);
}
@Override
protected int poll() {
throw new IllegalArgumentException("Simulated");
}
}
}
| MyConsumer |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/cache/spi/TimestampsCache.java | {
"start": 983,
"end": 2156
} | interface ____ {
/**
* The region used to store all timestamp data.
*/
TimestampsRegion getRegion();
/**
* Perform pre-invalidation of the passed spaces (table names)
* against the timestamp region data.
*/
void preInvalidate(
String[] spaces,
SharedSessionContractImplementor session);
/**
* Perform invalidation of the passed spaces (table names)
* against the timestamp region data.
*/
void invalidate(
String[] spaces,
SharedSessionContractImplementor session);
/**
* Perform an up-to-date check for the given set of query spaces as
* part of verifying the validity of cached query results.
*/
boolean isUpToDate(
String[] spaces,
Long timestamp,
SharedSessionContractImplementor session);
/**
* Perform an up-to-date check for the given set of query spaces as
* part of verifying the validity of cached query results.
*/
boolean isUpToDate(
Collection<String> spaces,
Long timestamp,
SharedSessionContractImplementor session);
default void clear() throws CacheException {
getRegion().clear();
}
default void destroy() {
// nothing to do - the region itself is destroyed
}
}
| TimestampsCache |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java | {
"start": 22399,
"end": 24333
} | class ____ extends AbstractService
implements ContainerAllocator {
private MRAppContainerAllocator allocator;
TestCleanupContainerAllocator() {
super(TestCleanupContainerAllocator.class.getName());
allocator = new MRAppContainerAllocator();
}
@Override
public void handle(ContainerAllocatorEvent event) {
allocator.handle(event);
}
@Override
protected void serviceStop() throws Exception {
numStops++;
ContainerAllocatorStopped = numStops;
super.serviceStop();
}
}
@Override
public RMHeartbeatHandler getRMHeartbeatHandler() {
return getStubbedHeartbeatHandler(getContext());
}
@Override
public void cleanupStagingDir() throws IOException {
numStops++;
stagingDirCleanedup = numStops;
}
@Override
protected void sysexit() {
}
}
private static RMHeartbeatHandler getStubbedHeartbeatHandler(
final AppContext appContext) {
return new RMHeartbeatHandler() {
@Override
public long getLastHeartbeatTime() {
return appContext.getClock().getTime();
}
@Override
public void runOnNextHeartbeat(Runnable callback) {
callback.run();
}
};
}
@Test
@Timeout(value = 20)
public void testStagingCleanupOrder() throws Exception {
MRAppTestCleanup app = new MRAppTestCleanup(1, 1, true,
this.getClass().getName(), true);
JobImpl job = (JobImpl)app.submit(new Configuration());
app.waitForState(job, JobState.SUCCEEDED);
app.verifyCompleted();
int waitTime = 20 * 1000;
while (waitTime > 0 && app.numStops < 2) {
Thread.sleep(100);
waitTime -= 100;
}
// assert ContainerAllocatorStopped and then tagingDirCleanedup
assertEquals(1, app.ContainerAllocatorStopped);
assertEquals(2, app.stagingDirCleanedup);
}
}
| TestCleanupContainerAllocator |
java | apache__commons-lang | src/test/java/org/apache/commons/lang3/builder/ReflectionToStringBuilderExcludeTest.java | {
"start": 1269,
"end": 5035
} | class ____ {
@SuppressWarnings("unused")
private final String secretField = SECRET_VALUE;
@SuppressWarnings("unused")
private final String showField = NOT_SECRET_VALUE;
}
private static final String NOT_SECRET_FIELD = "showField";
private static final String NOT_SECRET_VALUE = "Hello World!";
private static final String SECRET_FIELD = "secretField";
private static final String SECRET_VALUE = "secret value";
@Test
void test_toStringExclude() {
final String toString = ReflectionToStringBuilder.toStringExclude(new TestFixture(), SECRET_FIELD);
validateSecretFieldAbsent(toString);
}
@Test
void test_toStringExcludeArray() {
final String toString = ReflectionToStringBuilder.toStringExclude(new TestFixture(), SECRET_FIELD);
validateSecretFieldAbsent(toString);
}
@Test
void test_toStringExcludeArrayWithNull() {
final String toString = ReflectionToStringBuilder.toStringExclude(new TestFixture(), new String[]{null});
validateSecretFieldPresent(toString);
}
@Test
void test_toStringExcludeArrayWithNulls() {
final String toString = ReflectionToStringBuilder.toStringExclude(new TestFixture(), null, null);
validateSecretFieldPresent(toString);
}
@Test
void test_toStringExcludeCollection() {
final List<String> excludeList = new ArrayList<>();
excludeList.add(SECRET_FIELD);
final String toString = ReflectionToStringBuilder.toStringExclude(new TestFixture(), excludeList);
validateSecretFieldAbsent(toString);
}
@Test
void test_toStringExcludeCollectionWithNull() {
final List<String> excludeList = new ArrayList<>();
excludeList.add(null);
final String toString = ReflectionToStringBuilder.toStringExclude(new TestFixture(), excludeList);
validateSecretFieldPresent(toString);
}
@Test
void test_toStringExcludeCollectionWithNulls() {
final List<String> excludeList = new ArrayList<>();
excludeList.add(null);
excludeList.add(null);
final String toString = ReflectionToStringBuilder.toStringExclude(new TestFixture(), excludeList);
validateSecretFieldPresent(toString);
}
@Test
void test_toStringExcludeEmptyArray() {
final String toString = ReflectionToStringBuilder.toStringExclude(new TestFixture(), ArrayUtils.EMPTY_STRING_ARRAY);
validateSecretFieldPresent(toString);
}
@Test
void test_toStringExcludeEmptyCollection() {
final String toString = ReflectionToStringBuilder.toStringExclude(new TestFixture(), new ArrayList<>());
validateSecretFieldPresent(toString);
}
@Test
void test_toStringExcludeNullArray() {
final String toString = ReflectionToStringBuilder.toStringExclude(new TestFixture(), (String[]) null);
validateSecretFieldPresent(toString);
}
@Test
void test_toStringExcludeNullCollection() {
final String toString = ReflectionToStringBuilder.toStringExclude(new TestFixture(), (Collection<String>) null);
validateSecretFieldPresent(toString);
}
private void validateNonSecretField(final String toString) {
assertTrue(toString.contains(NOT_SECRET_FIELD));
assertTrue(toString.contains(NOT_SECRET_VALUE));
}
private void validateSecretFieldAbsent(final String toString) {
assertEquals(ArrayUtils.INDEX_NOT_FOUND, toString.indexOf(SECRET_VALUE));
validateNonSecretField(toString);
}
private void validateSecretFieldPresent(final String toString) {
assertTrue(toString.indexOf(SECRET_VALUE) > 0);
validateNonSecretField(toString);
}
}
| TestFixture |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/GeoCoderEndpointBuilderFactory.java | {
"start": 9932,
"end": 12428
} | interface ____
extends
EndpointProducerBuilder {
default GeoCoderEndpointBuilder basic() {
return (GeoCoderEndpointBuilder) this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedGeoCoderEndpointBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedGeoCoderEndpointBuilder lazyStartProducer(String lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
}
public | AdvancedGeoCoderEndpointBuilder |
java | spring-projects__spring-security | oauth2/oauth2-client/src/main/java/org/springframework/security/oauth2/client/userinfo/DefaultReactiveOAuth2UserService.java | {
"start": 2967,
"end": 10977
} | class ____ implements ReactiveOAuth2UserService<OAuth2UserRequest, OAuth2User> {
private static final String INVALID_USER_INFO_RESPONSE_ERROR_CODE = "invalid_user_info_response";
private static final String MISSING_USER_INFO_URI_ERROR_CODE = "missing_user_info_uri";
private static final String MISSING_USER_NAME_ATTRIBUTE_ERROR_CODE = "missing_user_name_attribute";
private static final ParameterizedTypeReference<Map<String, Object>> STRING_OBJECT_MAP = new ParameterizedTypeReference<>() {
};
private static final ParameterizedTypeReference<Map<String, String>> STRING_STRING_MAP = new ParameterizedTypeReference<>() {
};
private Converter<OAuth2UserRequest, Converter<Map<String, Object>, Map<String, Object>>> attributesConverter = (
request) -> (attributes) -> attributes;
private WebClient webClient = WebClient.create();
@Override
public Mono<OAuth2User> loadUser(OAuth2UserRequest userRequest) throws OAuth2AuthenticationException {
return Mono.defer(() -> {
Assert.notNull(userRequest, "userRequest cannot be null");
String userInfoUri = userRequest.getClientRegistration()
.getProviderDetails()
.getUserInfoEndpoint()
.getUri();
if (!StringUtils.hasText(userInfoUri)) {
OAuth2Error oauth2Error = new OAuth2Error(MISSING_USER_INFO_URI_ERROR_CODE,
"Missing required UserInfo Uri in UserInfoEndpoint for Client Registration: "
+ userRequest.getClientRegistration().getRegistrationId(),
null);
throw new OAuth2AuthenticationException(oauth2Error, oauth2Error.toString());
}
String userNameAttributeName = userRequest.getClientRegistration()
.getProviderDetails()
.getUserInfoEndpoint()
.getUserNameAttributeName();
if (!StringUtils.hasText(userNameAttributeName)) {
OAuth2Error oauth2Error = new OAuth2Error(MISSING_USER_NAME_ATTRIBUTE_ERROR_CODE,
"Missing required \"user name\" attribute name in UserInfoEndpoint for Client Registration: "
+ userRequest.getClientRegistration().getRegistrationId(),
null);
throw new OAuth2AuthenticationException(oauth2Error, oauth2Error.toString());
}
AuthenticationMethod authenticationMethod = userRequest.getClientRegistration()
.getProviderDetails()
.getUserInfoEndpoint()
.getAuthenticationMethod();
WebClient.RequestHeadersSpec<?> requestHeadersSpec = getRequestHeaderSpec(userRequest, userInfoUri,
authenticationMethod);
// @formatter:off
Mono<Map<String, Object>> userAttributes = requestHeadersSpec.retrieve()
.onStatus(HttpStatusCode::isError, (response) ->
parse(response)
.map((userInfoErrorResponse) -> {
String description = userInfoErrorResponse.getErrorObject().getDescription();
OAuth2Error oauth2Error = new OAuth2Error(INVALID_USER_INFO_RESPONSE_ERROR_CODE, description,
null);
throw new OAuth2AuthenticationException(oauth2Error, oauth2Error.toString());
})
)
.bodyToMono(DefaultReactiveOAuth2UserService.STRING_OBJECT_MAP)
.mapNotNull((attributes) -> this.attributesConverter.convert(userRequest).convert(attributes));
return userAttributes.map((attrs) -> {
GrantedAuthority authority = new OAuth2UserAuthority(attrs, userNameAttributeName);
Set<GrantedAuthority> authorities = new HashSet<>();
authorities.add(authority);
OAuth2AccessToken token = userRequest.getAccessToken();
for (String scope : token.getScopes()) {
authorities.add(new SimpleGrantedAuthority("SCOPE_" + scope));
}
return new DefaultOAuth2User(authorities, attrs, userNameAttributeName);
})
.onErrorMap((ex) -> (ex instanceof UnsupportedMediaTypeException ||
ex.getCause() instanceof UnsupportedMediaTypeException), (ex) -> {
String contentType = (ex instanceof UnsupportedMediaTypeException) ?
((UnsupportedMediaTypeException) ex).getContentType().toString() :
((UnsupportedMediaTypeException) ex.getCause()).getContentType().toString();
String errorMessage = "An error occurred while attempting to retrieve the UserInfo Resource from '"
+ userRequest.getClientRegistration().getProviderDetails().getUserInfoEndpoint()
.getUri()
+ "': response contains invalid content type '" + contentType + "'. "
+ "The UserInfo Response should return a JSON object (content type 'application/json') "
+ "that contains a collection of name and value pairs of the claims about the authenticated End-User. "
+ "Please ensure the UserInfo Uri in UserInfoEndpoint for Client Registration '"
+ userRequest.getClientRegistration().getRegistrationId()
+ "' conforms to the UserInfo Endpoint, "
+ "as defined in OpenID Connect 1.0: 'https://openid.net/specs/openid-connect-core-1_0.html#UserInfo'";
OAuth2Error oauth2Error = new OAuth2Error(INVALID_USER_INFO_RESPONSE_ERROR_CODE, errorMessage,
null);
throw new OAuth2AuthenticationException(oauth2Error, oauth2Error.toString(), ex);
})
.onErrorMap((ex) -> {
OAuth2Error oauth2Error = new OAuth2Error(INVALID_USER_INFO_RESPONSE_ERROR_CODE,
"An error occurred reading the UserInfo response: " + ex.getMessage(), null);
return new OAuth2AuthenticationException(oauth2Error, oauth2Error.toString(), ex);
});
});
// @formatter:on
}
private WebClient.RequestHeadersSpec<?> getRequestHeaderSpec(OAuth2UserRequest userRequest, String userInfoUri,
AuthenticationMethod authenticationMethod) {
if (AuthenticationMethod.FORM.equals(authenticationMethod)) {
// @formatter:off
return this.webClient.post()
.uri(userInfoUri)
.header(HttpHeaders.ACCEPT, MediaType.APPLICATION_JSON_VALUE)
.header(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_FORM_URLENCODED_VALUE)
.bodyValue("access_token=" + userRequest.getAccessToken().getTokenValue());
// @formatter:on
}
// @formatter:off
return this.webClient.get()
.uri(userInfoUri)
.header(HttpHeaders.ACCEPT, MediaType.APPLICATION_JSON_VALUE)
.headers((headers) -> headers
.setBearerAuth(userRequest.getAccessToken().getTokenValue())
);
// @formatter:on
}
/**
* Use this strategy to adapt user attributes into a format understood by Spring
* Security; by default, the original attributes are preserved.
*
* <p>
* This can be helpful, for example, if the user attribute is nested. Since Spring
* Security needs the username attribute to be at the top level, you can use this
* method to do:
*
* <pre>
* DefaultReactiveOAuth2UserService userService = new DefaultReactiveOAuth2UserService();
* userService.setAttributesConverter((userRequest) -> (attributes) ->
* Map<String, Object> userObject = (Map<String, Object>) attributes.get("user");
* attributes.put("user-name", userObject.get("user-name"));
* return attributes;
* });
* </pre>
* @param attributesConverter the attribute adaptation strategy to use
* @since 6.3
*/
public void setAttributesConverter(
Converter<OAuth2UserRequest, Converter<Map<String, Object>, Map<String, Object>>> attributesConverter) {
Assert.notNull(attributesConverter, "attributesConverter cannot be null");
this.attributesConverter = attributesConverter;
}
/**
* Sets the {@link WebClient} used for retrieving the user endpoint
* @param webClient the client to use
*/
public void setWebClient(WebClient webClient) {
Assert.notNull(webClient, "webClient cannot be null");
this.webClient = webClient;
}
private static Mono<UserInfoErrorResponse> parse(ClientResponse httpResponse) {
String wwwAuth = httpResponse.headers().asHttpHeaders().getFirst(HttpHeaders.WWW_AUTHENTICATE);
if (StringUtils.hasLength(wwwAuth)) {
// Bearer token error?
return Mono.fromCallable(() -> UserInfoErrorResponse.parse(wwwAuth));
}
// Other error?
return httpResponse.bodyToMono(STRING_STRING_MAP)
.map((body) -> new UserInfoErrorResponse(ErrorObject.parse(new JSONObject(body))));
}
}
| DefaultReactiveOAuth2UserService |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/xsd/LocalXsdResolver.java | {
"start": 805,
"end": 2888
} | class ____ {
public static String latestJpaVerison() {
return "3.2";
}
public static boolean isValidJpaVersion(String version) {
return switch ( version ) {
case "1.0", "2.0", "2.1", "2.2", "3.0", "3.1", "3.2" -> true;
default -> false;
};
}
public static URL resolveLocalXsdUrl(String resourceName) {
try {
final URL url = LocalXsdResolver.class.getClassLoader().getResource( resourceName );
if ( url != null ) {
return url;
}
}
catch (Exception ignore) {
}
if ( resourceName.startsWith( "/" ) ) {
resourceName = resourceName.substring( 1 );
try {
final URL url = LocalXsdResolver.class.getClassLoader().getResource( resourceName );
if ( url != null ) {
return url;
}
}
catch (Exception ignore) {
}
}
// Last: we try name as a URL
try {
return new URL( resourceName );
}
catch (Exception ignore) {
}
return null;
}
public static Schema resolveLocalXsdSchema(String schemaResourceName) {
final URL url = resolveLocalXsdUrl( schemaResourceName );
if ( url == null ) {
throw new XsdException( "Unable to locate schema [" + schemaResourceName + "] via classpath", schemaResourceName );
}
try {
final var schemaStream = url.openStream();
try {
return SchemaFactory.newInstance( W3C_XML_SCHEMA_NS_URI )
.newSchema( new StreamSource( url.openStream() ) );
}
catch ( SAXException | IOException e ) {
throw new XsdException( "Unable to load schema [" + schemaResourceName + "]", e, schemaResourceName );
}
finally {
try {
schemaStream.close();
}
catch ( IOException e ) {
JAXB_LOGGER.problemClosingSchemaStream( e.toString() );
}
}
}
catch ( IOException e ) {
throw new XsdException( "Stream error handling schema url [" + url.toExternalForm() + "]", schemaResourceName );
}
}
public static XsdDescriptor buildXsdDescriptor(String resourceName, String version, String namespaceUri) {
return new XsdDescriptor( resourceName, resolveLocalXsdSchema( resourceName ), version, namespaceUri );
}
}
| LocalXsdResolver |
java | apache__dubbo | dubbo-registry/dubbo-registry-api/src/test/java/org/apache/dubbo/registry/support/FailbackRegistryTest.java | {
"start": 6499,
"end": 8680
} | class ____ extends FailbackRegistry {
private final URL serviceUrl;
CountDownLatch latch;
private volatile boolean bad = false;
/**
* @param url
* @param serviceUrl
*/
public MockRegistry(URL url, URL serviceUrl, CountDownLatch latch) {
super(url);
this.serviceUrl = serviceUrl;
this.latch = latch;
}
/**
* @param bad the bad to set
*/
public void setBad(boolean bad) {
this.bad = bad;
}
@Override
public void doRegister(URL url) {
if (bad) {
throw new RuntimeException("can not invoke!");
}
latch.countDown();
}
@Override
public void doUnregister(URL url) {
if (bad) {
throw new RuntimeException("can not invoke!");
}
latch.countDown();
}
@Override
public void doSubscribe(URL url, NotifyListener listener) {
if (bad) {
throw new RuntimeException("can not invoke!");
}
super.notify(url, listener, Arrays.asList(new URL[] {serviceUrl}));
latch.countDown();
}
@Override
public void doUnsubscribe(URL url, NotifyListener listener) {
if (bad) {
throw new RuntimeException("can not invoke!");
}
latch.countDown();
}
@Override
public boolean isAvailable() {
return true;
}
@Override
public void removeFailedRegisteredTask(URL url) {
if (bad) {
throw new RuntimeException("can not invoke!");
}
super.removeFailedRegisteredTask(url);
latch.countDown();
}
@Override
public void removeFailedSubscribedTask(URL url, NotifyListener listener) {
if (bad) {
throw new RuntimeException("can not invoke!");
}
super.removeFailedSubscribedTask(url, listener);
latch.countDown();
}
}
}
| MockRegistry |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/FluxCreate.java | {
"start": 16890,
"end": 17551
} | class ____<T> extends BaseSink<T> {
IgnoreSink(CoreSubscriber<? super T> actual) {
super(actual);
}
@Override
public FluxSink<T> next(T t) {
if (isTerminated()) {
Operators.onNextDropped(t, ctx);
return this;
}
if (isCancelled()) {
Operators.onDiscard(t, ctx);
return this;
}
actual.onNext(t);
for (; ; ) {
long s = requested;
long r = s & Long.MAX_VALUE;
if (r == 0L || REQUESTED.compareAndSet(this, s, (r - 1) | (s & Long.MIN_VALUE))) {
return this;
}
}
}
@Override
public String toString() {
return "FluxSink(" + OverflowStrategy.IGNORE + ")";
}
}
static abstract | IgnoreSink |
java | spring-projects__spring-security | oauth2/oauth2-jose/src/test/java/org/springframework/security/oauth2/jwt/JwtValidatorsTests.java | {
"start": 1360,
"end": 5009
} | class ____ {
private static final String ISS_CLAIM = "iss";
@Test
public void createWhenJwtIssuerValidatorIsPresentThenCreateDefaultValidatorWithJwtIssuerValidator() {
OAuth2TokenValidator<Jwt> validator = JwtValidators
.createDefaultWithValidators(new JwtIssuerValidator(ISS_CLAIM));
assertThat(containsByType(validator, JwtIssuerValidator.class)).isTrue();
assertThat(containsByType(validator, JwtTimestampValidator.class)).isTrue();
assertThat(containsByType(validator, X509CertificateThumbprintValidator.class)).isTrue();
}
@Test
@SuppressWarnings("unchecked")
public void createWhenJwtTimestampValidatorIsPresentThenCreateDefaultValidatorWithOnlyOneJwtTimestampValidator() {
OAuth2TokenValidator<Jwt> validator = JwtValidators.createDefaultWithValidators(new JwtTimestampValidator());
DelegatingOAuth2TokenValidator<Jwt> delegatingOAuth2TokenValidator = (DelegatingOAuth2TokenValidator<Jwt>) validator;
Collection<OAuth2TokenValidator<Jwt>> tokenValidators = (Collection<OAuth2TokenValidator<Jwt>>) ReflectionTestUtils
.getField(delegatingOAuth2TokenValidator, "tokenValidators");
assertThat(containsByType(validator, JwtTimestampValidator.class)).isTrue();
assertThat(containsByType(validator, X509CertificateThumbprintValidator.class)).isTrue();
assertThat(containsByType(validator, JwtTypeValidator.class)).isTrue();
assertThat(Objects.requireNonNull(tokenValidators).size()).isEqualTo(3);
}
@Test
public void createWhenEmptyValidatorsThenThrowsException() {
assertThatException().isThrownBy(() -> JwtValidators.createDefaultWithValidators(Collections.emptyList()));
}
@Test
public void createAtJwtWhenIssuerClientIdAudienceThenBuilds() {
Jwt.Builder builder = TestJwts.jwt();
OAuth2TokenValidator<Jwt> validator = JwtValidators.createAtJwtValidator()
.audience("audience")
.clientId("clientId")
.issuer("issuer")
.build();
OAuth2TokenValidatorResult result = validator.validate(builder.build());
assertThat(result.getErrors().toString()).contains("at+jwt")
.contains("aud")
.contains("client_id")
.contains("iss");
result = validator.validate(builder.header(JoseHeaderNames.TYP, "JWT").build());
assertThat(result.getErrors().toString()).contains("at+jwt");
result = validator.validate(builder.header(JoseHeaderNames.TYP, "at+jwt").build());
assertThat(result.getErrors().toString()).doesNotContain("at+jwt");
result = validator.validate(builder.header(JoseHeaderNames.TYP, "application/at+jwt").build());
assertThat(result.getErrors().toString()).doesNotContain("at+jwt");
result = validator.validate(builder.audience(List.of("audience")).build());
assertThat(result.getErrors().toString()).doesNotContain("aud");
result = validator.validate(builder.claim("client_id", "clientId").build());
assertThat(result.getErrors().toString()).doesNotContain("client_id");
result = validator.validate(builder.issuer("issuer").build());
assertThat(result.getErrors().toString()).doesNotContain("iss");
}
@SuppressWarnings("unchecked")
private boolean containsByType(OAuth2TokenValidator<Jwt> validator, Class<? extends OAuth2TokenValidator<?>> type) {
DelegatingOAuth2TokenValidator<Jwt> delegatingOAuth2TokenValidator = (DelegatingOAuth2TokenValidator<Jwt>) validator;
Collection<OAuth2TokenValidator<Jwt>> tokenValidators = (Collection<OAuth2TokenValidator<Jwt>>) ReflectionTestUtils
.getField(delegatingOAuth2TokenValidator, "tokenValidators");
OAuth2TokenValidator<?> tokenValidator = CollectionUtils
.findValueOfType(Objects.requireNonNull(tokenValidators), type);
return tokenValidator != null;
}
}
| JwtValidatorsTests |
java | quarkusio__quarkus | extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/security/HttpUpgradeAnnotationTransformerTest.java | {
"start": 1061,
"end": 1651
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(Endpoint.class, WSClient.class, SecurityTestUtils.class, IdentityMock.class,
CdiBeanSecurity.class, AdminEndpoint.class));
@Inject
CdiBeanSecurity cdiBeanSecurity;
@Test
public void testSecurityChecksNotRepeated() {
// fact that HTTP Upgrade is secured is tested in HttpUpgradeRolesAllowedAnnotationTest
// this test | HttpUpgradeAnnotationTransformerTest |
java | netty__netty | transport-sctp/src/main/java/io/netty/channel/sctp/nio/NioSctpChannel.java | {
"start": 14254,
"end": 14567
} | class ____ extends DefaultSctpChannelConfig {
private NioSctpChannelConfig(NioSctpChannel channel, SctpChannel javaChannel) {
super(channel, javaChannel);
}
@Override
protected void autoReadCleared() {
clearReadPending();
}
}
}
| NioSctpChannelConfig |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/selection/methodgenerics/wildcards/IntersectionMapper.java | {
"start": 832,
"end": 1051
} | class ____<T> {
private final T wrapped;
public Wrapper(T wrapped) {
this.wrapped = wrapped;
}
public T getWrapped() {
return wrapped;
}
}
| Wrapper |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/search/vectors/IVFKnnSearchStrategyTests.java | {
"start": 683,
"end": 1864
} | class ____ extends ESTestCase {
public void testMaxScorePropagation() {
LongAccumulator accumulator = new LongAccumulator(Long::max, AbstractMaxScoreKnnCollector.LEAST_COMPETITIVE);
IVFKnnSearchStrategy strategy = new IVFKnnSearchStrategy(0.5f, accumulator);
MaxScoreTopKnnCollector collector = new MaxScoreTopKnnCollector(2, 1000, strategy);
strategy.setCollector(collector);
collector.collect(1, 0.9f);
long competitiveScore = NeighborQueue.encodeRaw(1, 0.9f);
// accumulator should now be updated
strategy.nextVectorsBlock();
assertEquals(competitiveScore, accumulator.get());
assertEquals(competitiveScore, collector.getMinCompetitiveDocScore());
// updated accumulator directly with more competitive score
competitiveScore = NeighborQueue.encodeRaw(2, 1.5f);
accumulator.accumulate(competitiveScore);
assertEquals(competitiveScore, accumulator.get());
strategy.nextVectorsBlock();
assertEquals(competitiveScore, collector.getMinCompetitiveDocScore());
assertEquals(competitiveScore, accumulator.get());
}
}
| IVFKnnSearchStrategyTests |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java | {
"start": 2428,
"end": 39579
} | class ____ extends RuntimeException {
private static final long serialVersionUID = 1L;
public InvalidTopologyException(String msg) {
super(msg);
}
}
/**
* Get an instance of NetworkTopology based on the value of the configuration
* parameter net.topology.impl.
*
* @param conf the configuration to be used
* @return an instance of NetworkTopology
*/
public static NetworkTopology getInstance(Configuration conf){
return getInstance(conf, InnerNodeImpl.FACTORY);
}
public static NetworkTopology getInstance(Configuration conf,
InnerNode.Factory factory) {
NetworkTopology nt = ReflectionUtils.newInstance(
conf.getClass(CommonConfigurationKeysPublic.NET_TOPOLOGY_IMPL_KEY,
NetworkTopology.class, NetworkTopology.class), conf);
return nt.init(factory);
}
protected NetworkTopology init(InnerNode.Factory factory) {
if (!factory.equals(this.factory)) {
// the constructor has initialized the factory to default. So only init
// again if another factory is specified.
this.factory = factory;
this.clusterMap = factory.newInnerNode(NodeBase.ROOT);
}
return this;
}
InnerNode.Factory factory;
/**
* the root cluster map
*/
InnerNode clusterMap;
/** Depth of all leaf nodes */
private int depthOfAllLeaves = -1;
/** rack counter */
protected int numOfRacks = 0;
/** empty rack map, rackname->nodenumber. */
private HashMap<String, Set<String>> rackMap =
new HashMap<String, Set<String>>();
/** decommission nodes, contained stoped nodes. */
private HashSet<String> decommissionNodes = new HashSet<>();
/** empty rack counter. */
private int numOfEmptyRacks = 0;
/**
* Whether or not this cluster has ever consisted of more than 1 rack,
* according to the NetworkTopology.
*/
private boolean clusterEverBeenMultiRack = false;
/** the lock used to manage access */
protected ReadWriteLock netlock = new ReentrantReadWriteLock(true);
// keeping the constructor because other components like MR still uses this.
public NetworkTopology() {
this.factory = InnerNodeImpl.FACTORY;
this.clusterMap = factory.newInnerNode(NodeBase.ROOT);
}
  /**
   * Add a leaf node to the topology, updating the rack counter and the
   * empty-rack bookkeeping as necessary.
   *
   * @param node node to be added; can be null (treated as a no-op)
   * @exception IllegalArgumentException if the node is an inner node, or
   *            sits at an illegal network location
   * @exception InvalidTopologyException if adding the node would place a
   *            rack and a non-rack node at the same level of the topology
   */
  public void add(Node node) {
    if (node==null) return;
    // Depth the new leaf will occupy; computed before taking the lock since
    // it only reads the node's own location string.
    int newDepth = NodeBase.locationToDepth(node.getNetworkLocation()) + 1;
    netlock.writeLock().lock();
    try {
      if( node instanceof InnerNode ) {
        throw new IllegalArgumentException(
          "Not allow to add an inner node: "+NodeBase.getPath(node));
      }
      // Invariant: every leaf in the tree lives at the same depth. Reject a
      // node that would break that invariant.
      if ((depthOfAllLeaves != -1) && (depthOfAllLeaves != newDepth)) {
        LOG.error("Error: can't add leaf node {} at depth {} to topology:{}\n",
            NodeBase.getPath(node), newDepth, this);
        throw new InvalidTopologyException("Failed to add " + NodeBase.getPath(node) +
            ": You cannot have a rack and a non-rack node at the same " +
            "level of the network topology.");
      }
      Node rack = getNodeForNetworkLocation(node);
      if (rack != null && !(rack instanceof InnerNode)) {
        throw new IllegalArgumentException("Unexpected data node "
            + node.toString()
            + " at an illegal network location");
      }
      if (clusterMap.add(node)) {
        LOG.info("Adding a new node: "+NodeBase.getPath(node));
        // A null rack means this leaf introduced a brand-new rack.
        if (rack == null) {
          incrementRacks();
        }
        interAddNodeWithEmptyRack(node);
        // First leaf ever added fixes the depth for all future leaves.
        if (depthOfAllLeaves == -1) {
          depthOfAllLeaves = node.getLevel();
        }
      }
      LOG.debug("NetworkTopology became:\n{}", this);
    } finally {
      netlock.writeLock().unlock();
    }
  }
protected void incrementRacks() {
numOfRacks++;
if (!clusterEverBeenMultiRack && numOfRacks > 1) {
clusterEverBeenMultiRack = true;
}
}
/**
* Return a reference to the node given its string representation.
* Default implementation delegates to {@link #getNode(String)}.
*
* <p>To be overridden in subclasses for specific NetworkTopology
* implementations, as alternative to overriding the full {@link #add(Node)}
* method.
*
* @param node The string representation of this node's network location is
* used to retrieve a Node object.
* @return a reference to the node; null if the node is not in the tree
*
* @see #add(Node)
* @see #getNode(String)
*/
protected Node getNodeForNetworkLocation(Node node) {
return getNode(node.getNetworkLocation());
}
/**
* Given a string representation of a rack, return its children
* @param loc a path-like string representation of a rack
* @return a newly allocated list with all the node's children
*/
public List<Node> getDatanodesInRack(String loc) {
netlock.readLock().lock();
try {
loc = NodeBase.normalize(loc);
if (!NodeBase.ROOT.equals(loc)) {
loc = loc.substring(1);
}
InnerNode rack = (InnerNode) clusterMap.getLoc(loc);
return (rack == null) ? new ArrayList<>(0)
: new ArrayList<>(rack.getChildren());
} finally {
netlock.readLock().unlock();
}
}
/** Remove a node
* Update node counter and rack counter if necessary
* @param node node to be removed; can be null
*/
public void remove(Node node) {
if (node==null) return;
if( node instanceof InnerNode ) {
throw new IllegalArgumentException(
"Not allow to remove an inner node: "+NodeBase.getPath(node));
}
LOG.info("Removing a node: "+NodeBase.getPath(node));
netlock.writeLock().lock();
try {
if (clusterMap.remove(node)) {
InnerNode rack = (InnerNode)getNode(node.getNetworkLocation());
if (rack == null) {
numOfRacks--;
}
interRemoveNodeWithEmptyRack(node);
}
LOG.debug("NetworkTopology became:\n{}", this);
} finally {
netlock.writeLock().unlock();
}
}
/** Check if the tree contains node <i>node</i>
*
* @param node a node
* @return true if <i>node</i> is already in the tree; false otherwise
*/
public boolean contains(Node node) {
if (node == null) return false;
netlock.readLock().lock();
try {
Node parent = node.getParent();
for (int level = node.getLevel(); parent != null && level > 0;
parent = parent.getParent(), level--) {
if (parent == clusterMap) {
return true;
}
}
} finally {
netlock.readLock().unlock();
}
return false;
}
/** Given a string representation of a node, return its reference
*
* @param loc
* a path-like string representation of a node
* @return a reference to the node; null if the node is not in the tree
*/
public Node getNode(String loc) {
netlock.readLock().lock();
try {
loc = NodeBase.normalize(loc);
if (!NodeBase.ROOT.equals(loc))
loc = loc.substring(1);
return clusterMap.getLoc(loc);
} finally {
netlock.readLock().unlock();
}
}
/**
* @return true if this cluster has ever consisted of multiple racks, even if
* it is not now a multi-rack cluster.
*/
public boolean hasClusterEverBeenMultiRack() {
return clusterEverBeenMultiRack;
}
/** Given a string representation of a rack for a specific network
* location
*
* To be overridden in subclasses for specific NetworkTopology
* implementations, as alternative to overriding the full
* {@link #getRack(String)} method.
* @param loc
* a path-like string representation of a network location
* @return a rack string
*/
public String getRack(String loc) {
return loc;
}
/** @return the total number of racks */
public int getNumOfRacks() {
return numOfRacks;
}
/** @return the total number of leaf nodes */
public int getNumOfLeaves() {
return clusterMap.getNumOfLeaves();
}
/** Return the distance between two nodes
* It is assumed that the distance from one node to its parent is 1
* The distance between two nodes is calculated by summing up their distances
* to their closest common ancestor.
* @param node1 one node
* @param node2 another node
* @return the distance between node1 and node2 which is zero if they are the same
* or {@link Integer#MAX_VALUE} if node1 or node2 do not belong to the cluster
*/
public int getDistance(Node node1, Node node2) {
if ((node1 != null && node1.equals(node2)) ||
(node1 == null && node2 == null)) {
return 0;
}
if (node1 == null || node2 == null) {
LOG.warn("One of the nodes is a null pointer");
return Integer.MAX_VALUE;
}
Node n1=node1, n2=node2;
int dis = 0;
netlock.readLock().lock();
try {
int level1=node1.getLevel(), level2=node2.getLevel();
while(n1!=null && level1>level2) {
n1 = n1.getParent();
level1--;
dis++;
}
while(n2!=null && level2>level1) {
n2 = n2.getParent();
level2--;
dis++;
}
while(n1!=null && n2!=null && n1.getParent()!=n2.getParent()) {
n1=n1.getParent();
n2=n2.getParent();
dis+=2;
}
} finally {
netlock.readLock().unlock();
}
if (n1==null) {
LOG.warn("The cluster does not contain node: "+NodeBase.getPath(node1));
return Integer.MAX_VALUE;
}
if (n2==null) {
LOG.warn("The cluster does not contain node: "+NodeBase.getPath(node2));
return Integer.MAX_VALUE;
}
return dis+2;
}
/** Return the distance between two nodes by comparing their network paths
* without checking if they belong to the same ancestor node by reference.
* It is assumed that the distance from one node to its parent is 1
* The distance between two nodes is calculated by summing up their distances
* to their closest common ancestor.
* @param node1 one node
* @param node2 another node
* @return the distance between node1 and node2
*/
static public int getDistanceByPath(Node node1, Node node2) {
if (node1 == null && node2 == null) {
return 0;
}
if (node1 == null || node2 == null) {
LOG.warn("One of the nodes is a null pointer");
return Integer.MAX_VALUE;
}
String[] paths1 = NodeBase.getPathComponents(node1);
String[] paths2 = NodeBase.getPathComponents(node2);
int dis = 0;
int index = 0;
int minLevel = Math.min(paths1.length, paths2.length);
while (index < minLevel) {
if (!paths1[index].equals(paths2[index])) {
// Once the path starts to diverge, compute the distance that include
// the rest of paths.
dis += 2 * (minLevel - index);
break;
}
index++;
}
dis += Math.abs(paths1.length - paths2.length);
return dis;
}
  /** Check if two nodes are on the same rack.
   *
   * <p>NOTE(review): despite what the previous javadoc claimed, no
   * {@code IllegalArgumentException} is thrown here — a null argument
   * simply yields {@code false}.
   *
   * @param node1 one node (can be null)
   * @param node2 another node (can be null)
   * @return true if node1 and node2 are on the same rack; false if either
   *         argument is null or their parents differ
   */
  public boolean isOnSameRack(Node node1, Node node2) {
    if (node1 == null || node2 == null) {
      return false;
    }
    return isSameParents(node1, node2);
  }
/**
* @return Check if network topology is aware of NodeGroup.
*/
public boolean isNodeGroupAware() {
return false;
}
/**
* @return Return false directly as not aware of NodeGroup, to be override in sub-class.
* @param node1 input node1.
* @param node2 input node2.
*/
public boolean isOnSameNodeGroup(Node node1, Node node2) {
return false;
}
/**
* Compare the parents of each node for equality
*
* <p>To be overridden in subclasses for specific NetworkTopology
* implementations, as alternative to overriding the full
* {@link #isOnSameRack(Node, Node)} method.
*
* @param node1 the first node to compare
* @param node2 the second node to compare
* @return true if their parents are equal, false otherwise
*
* @see #isOnSameRack(Node, Node)
*/
protected boolean isSameParents(Node node1, Node node2) {
return node1.getParent()==node2.getParent();
}
@VisibleForTesting
void setRandomSeed(long seed) {
RANDOM_REF.set(new Random(seed));
}
Random getRandom() {
Random random = RANDOM_REF.get();
return (random == null) ? ThreadLocalRandom.current() : random;
}
/**
* Randomly choose a node.
*
* @param scope range of nodes from which a node will be chosen
* @return the chosen node
*
* @see #chooseRandom(String, Collection)
*/
public Node chooseRandom(final String scope) {
return chooseRandom(scope, null);
}
/**
* Randomly choose one node from <i>scope</i>.
*
* If scope starts with ~, choose one from the all nodes except for the
* ones in <i>scope</i>; otherwise, choose one from <i>scope</i>.
* If excludedNodes is given, choose a node that's not in excludedNodes.
*
* @param scope range of nodes from which a node will be chosen
* @param excludedNodes nodes to be excluded from
* @return the chosen node
*/
public Node chooseRandom(final String scope,
final Collection<Node> excludedNodes) {
netlock.readLock().lock();
try {
if (scope.startsWith("~")) {
return chooseRandom(NodeBase.ROOT, scope.substring(1), excludedNodes);
} else {
return chooseRandom(scope, null, excludedNodes);
}
} finally {
netlock.readLock().unlock();
}
}
protected Node chooseRandom(final String scope, String excludedScope,
final Collection<Node> excludedNodes) {
if (excludedScope != null) {
if (isChildScope(scope, excludedScope)) {
return null;
}
if (!isChildScope(excludedScope, scope)) {
excludedScope = null;
}
}
Node node = getNode(scope);
if (!(node instanceof InnerNode)) {
return excludedNodes != null && excludedNodes.contains(node) ?
null : node;
}
InnerNode innerNode = (InnerNode)node;
int numOfDatanodes = innerNode.getNumOfLeaves();
if (excludedScope == null) {
node = null;
} else {
node = getNode(excludedScope);
if (!(node instanceof InnerNode)) {
numOfDatanodes -= 1;
} else {
numOfDatanodes -= ((InnerNode)node).getNumOfLeaves();
}
}
if (numOfDatanodes <= 0) {
LOG.debug("Failed to find datanode (scope=\"{}\" excludedScope=\"{}\"). numOfDatanodes={}",
scope, excludedScope, numOfDatanodes);
return null;
}
final int availableNodes;
if (excludedScope == null) {
availableNodes = countNumOfAvailableNodes(scope, excludedNodes);
} else {
netlock.readLock().lock();
try {
availableNodes = countNumOfAvailableNodes(scope, excludedNodes) -
countNumOfAvailableNodes(excludedScope, excludedNodes);
} finally {
netlock.readLock().unlock();
}
}
if (LOG.isDebugEnabled()) {
LOG.debug("Choosing random from {} available nodes on node {}, scope={},"
+ " excludedScope={}, excludeNodes={}. numOfDatanodes={}.",
availableNodes, innerNode, scope, excludedScope, excludedNodes,
numOfDatanodes);
}
Node ret = null;
if (availableNodes > 0) {
ret = chooseRandom(innerNode, node, excludedNodes, numOfDatanodes,
availableNodes);
}
LOG.debug("chooseRandom returning {}", ret);
return ret;
}
/**
* Randomly choose one node under <i>parentNode</i>, considering the exclude
* nodes and scope. Should be called with {@link #netlock}'s readlock held.
*
* @param parentNode the parent node
* @param excludedScopeNode the node corresponding to the exclude scope.
* @param excludedNodes a collection of nodes to be excluded from
* @param totalInScopeNodes total number of nodes under parentNode, excluding
* the excludedScopeNode
* @param availableNodes number of available nodes under parentNode that
* could be chosen, excluding excludedNodes
* @return the chosen node, or null if none can be chosen
*/
private Node chooseRandom(final InnerNode parentNode,
final Node excludedScopeNode, final Collection<Node> excludedNodes,
final int totalInScopeNodes, final int availableNodes) {
if (totalInScopeNodes < availableNodes) {
LOG.warn("Total Nodes in scope : {} are less than Available Nodes : {}",
totalInScopeNodes, availableNodes);
return null;
}
Random r = getRandom();
if (excludedNodes == null || excludedNodes.isEmpty()) {
// if there are no excludedNodes, randomly choose a node
final int index = r.nextInt(totalInScopeNodes);
return parentNode.getLeaf(index, excludedScopeNode);
}
// excludedNodes non empty.
// Choose the nth VALID node, where n is random. VALID meaning it can be
// returned, after considering exclude scope and exclude nodes.
// The probability of being chosen should be equal for all VALID nodes.
// Notably, we do NOT choose nth node, and find the next valid node
// if n is excluded - this will make the probability of the node immediately
// after an excluded node higher.
//
// Start point is always 0 and that's fine, because the nth valid node
// logic provides equal randomness.
//
// Consider this example, where 1,3,5 out of the 10 nodes are excluded:
// 1 2 3 4 5 6 7 8 9 10
// x x x
// We will randomly choose the nth valid node where n is [0,6].
// We do NOT choose a random number n and just use the closest valid node,
// for example both n=3 and n=4 will choose 4, making it a 2/10 probability,
// higher than the expected 1/7
// totalInScopeNodes=10 and availableNodes=7 in this example.
int nthValidToReturn = r.nextInt(availableNodes);
LOG.debug("nthValidToReturn is {}", nthValidToReturn);
Node ret =
parentNode.getLeaf(r.nextInt(totalInScopeNodes), excludedScopeNode);
if (!excludedNodes.contains(ret)) {
// return if we're lucky enough to get a valid node at a random first pick
LOG.debug("Chosen node {} from first random", ret);
return ret;
} else {
ret = null;
}
Node lastValidNode = null;
for (int i = 0; i < totalInScopeNodes; ++i) {
ret = parentNode.getLeaf(i, excludedScopeNode);
if (!excludedNodes.contains(ret)) {
if (nthValidToReturn == 0) {
break;
}
--nthValidToReturn;
lastValidNode = ret;
} else {
LOG.debug("Node {} is excluded, continuing.", ret);
ret = null;
}
}
if (ret == null && lastValidNode != null) {
LOG.error("BUG: Found lastValidNode {} but not nth valid node. "
+ "parentNode={}, excludedScopeNode={}, excludedNodes={}, "
+ "totalInScopeNodes={}, availableNodes={}, nthValidToReturn={}.",
lastValidNode, parentNode, excludedScopeNode, excludedNodes,
totalInScopeNodes, availableNodes, nthValidToReturn);
ret = lastValidNode;
}
return ret;
}
/** return leaves in <i>scope</i>
* @param scope a path string
* @return leaves nodes under specific scope
*/
public List<Node> getLeaves(String scope) {
Node node = getNode(scope);
List<Node> leafNodes = new ArrayList<Node>();
if (!(node instanceof InnerNode)) {
leafNodes.add(node);
} else {
InnerNode innerNode = (InnerNode) node;
for (int i=0;i<innerNode.getNumOfLeaves();i++) {
leafNodes.add(innerNode.getLeaf(i, null));
}
}
return leafNodes;
}
/** return the number of leaves in <i>scope</i> but not in <i>excludedNodes</i>
* if scope starts with ~, return the number of nodes that are not
* in <i>scope</i> and <i>excludedNodes</i>;
* @param scope a path string that may start with ~
* @param excludedNodes a list of nodes
* @return number of available nodes
*/
@VisibleForTesting
public int countNumOfAvailableNodes(String scope,
Collection<Node> excludedNodes) {
boolean isExcluded=false;
if (scope.startsWith("~")) {
isExcluded=true;
scope=scope.substring(1);
}
scope = NodeBase.normalize(scope);
int excludedCountInScope = 0; // the number of nodes in both scope & excludedNodes
int excludedCountOffScope = 0; // the number of nodes outside scope & excludedNodes
netlock.readLock().lock();
try {
if (excludedNodes != null) {
for (Node node : excludedNodes) {
node = getNode(NodeBase.getPath(node));
if (node == null) {
continue;
}
if (isNodeInScope(node, scope)) {
if (node instanceof InnerNode) {
excludedCountInScope += ((InnerNode) node).getNumOfLeaves();
} else {
excludedCountInScope++;
}
} else {
excludedCountOffScope++;
}
}
}
Node n = getNode(scope);
int scopeNodeCount = 0;
if (n != null) {
scopeNodeCount++;
}
if (n instanceof InnerNode) {
scopeNodeCount=((InnerNode)n).getNumOfLeaves();
}
if (isExcluded) {
return clusterMap.getNumOfLeaves() - scopeNodeCount
- excludedCountOffScope;
} else {
return scopeNodeCount - excludedCountInScope;
}
} finally {
netlock.readLock().unlock();
}
}
/** convert a network tree to a string. */
@Override
public String toString() {
// print the number of racks
StringBuilder tree = new StringBuilder();
tree.append("Number of racks: ")
.append(numOfRacks)
.append("\n");
// print the number of leaves
int numOfLeaves = getNumOfLeaves();
tree.append("Expected number of leaves:")
.append(numOfLeaves)
.append("\n");
// print nodes
for(int i=0; i<numOfLeaves; i++) {
tree.append(NodeBase.getPath(clusterMap.getLeaf(i, null)))
.append("\n");
}
return tree.toString();
}
/**
* @return Divide networklocation string into two parts by last separator, and get
* the first part here.
*
* @param networkLocation input networkLocation.
*/
public static String getFirstHalf(String networkLocation) {
int index = networkLocation.lastIndexOf(NodeBase.PATH_SEPARATOR_STR);
return networkLocation.substring(0, index);
}
/**
* @return Divide networklocation string into two parts by last separator, and get
* the second part here.
*
* @param networkLocation input networkLocation.
*/
public static String getLastHalf(String networkLocation) {
int index = networkLocation.lastIndexOf(NodeBase.PATH_SEPARATOR_STR);
return networkLocation.substring(index);
}
/**
* Returns an integer weight which specifies how far away {node} is away from
* {reader}. A lower value signifies that a node is closer.
*
* @param reader Node where data will be read
* @param node Replica of data
* @return weight
*/
@VisibleForTesting
protected int getWeight(Node reader, Node node) {
// 0 is local, 2 is same rack, and each level on each node increases the
//weight by 1
//Start off by initializing to Integer.MAX_VALUE
int weight = Integer.MAX_VALUE;
if (reader != null && node != null) {
if(reader.equals(node)) {
return 0;
}
int maxReaderLevel = reader.getLevel();
int maxNodeLevel = node.getLevel();
int currentLevelToCompare = maxReaderLevel > maxNodeLevel ?
maxNodeLevel : maxReaderLevel;
Node r = reader;
Node n = node;
weight = 0;
while(r != null && r.getLevel() > currentLevelToCompare) {
r = r.getParent();
weight++;
}
while(n != null && n.getLevel() > currentLevelToCompare) {
n = n.getParent();
weight++;
}
while(r != null && n != null && !r.equals(n)) {
r = r.getParent();
n = n.getParent();
weight+=2;
}
}
return weight;
}
/**
* Returns an integer weight which specifies how far away <i>node</i> is
* from <i>reader</i>. A lower value signifies that a node is closer.
* It uses network location to calculate the weight
*
* @param reader Node where data will be read
* @param node Replica of data
* @return weight
*/
@VisibleForTesting
protected static int getWeightUsingNetworkLocation(Node reader, Node node) {
//Start off by initializing to Integer.MAX_VALUE
int weight = Integer.MAX_VALUE;
if(reader != null && node != null) {
String readerPath = normalizeNetworkLocationPath(
reader.getNetworkLocation());
String nodePath = normalizeNetworkLocationPath(
node.getNetworkLocation());
//same rack
if(readerPath.equals(nodePath)) {
if(reader.getName().equals(node.getName())) {
weight = 0;
} else {
weight = 2;
}
} else {
String[] readerPathToken = readerPath.split(PATH_SEPARATOR_STR);
String[] nodePathToken = nodePath.split(PATH_SEPARATOR_STR);
int maxLevelToCompare = readerPathToken.length > nodePathToken.length ?
nodePathToken.length : readerPathToken.length;
int currentLevel = 1;
//traverse through the path and calculate the distance
while(currentLevel < maxLevelToCompare) {
if(!readerPathToken[currentLevel]
.equals(nodePathToken[currentLevel])){
break;
}
currentLevel++;
}
// +2 to correct the weight between reader and node rather than
// between parent of reader and parent of node.
weight = (readerPathToken.length - currentLevel) +
(nodePathToken.length - currentLevel) + 2;
}
}
return weight;
}
/** Normalize a path by stripping off any trailing {@link #PATH_SEPARATOR}.
* @param path path to normalize.
* @return the normalised path
* If <i>path</i>is null or empty {@link #ROOT} is returned
* @throws IllegalArgumentException if the first character of a non empty path
* is not {@link #PATH_SEPARATOR}
*/
private static String normalizeNetworkLocationPath(String path) {
if (path == null || path.length() == 0) {
return ROOT;
}
if (path.charAt(0) != PATH_SEPARATOR) {
throw new IllegalArgumentException("Network Location"
+ "path doesn't start with " +PATH_SEPARATOR+ ": "+path);
}
int len = path.length();
if (path.charAt(len-1) == PATH_SEPARATOR) {
return path.substring(0, len-1);
}
return path;
}
/**
* Sort nodes array by network distance to <i>reader</i>.
* <p>
* In a three-level topology, a node can be either local, on the same rack,
* or on a different rack from the reader. Sorting the nodes based on network
* distance from the reader reduces network traffic and improves
* performance.
* <p>
* As an additional twist, we also randomize the nodes at each network
* distance. This helps with load balancing when there is data skew.
*
* @param reader Node where data will be read
* @param nodes Available replicas with the requested data
* @param activeLen Number of active nodes at the front of the array
*/
public void sortByDistance(Node reader, Node[] nodes, int activeLen) {
/*
* This method is called if the reader is a datanode,
* so nonDataNodeReader flag is set to false.
*/
sortByDistance(reader, nodes, activeLen, null);
}
/**
* Sort nodes array by network distance to <i>reader</i> with secondary sort.
* <p>
* In a three-level topology, a node can be either local, on the same rack,
* or on a different rack from the reader. Sorting the nodes based on network
* distance from the reader reduces network traffic and improves
* performance.
* </p>
* As an additional twist, we also randomize the nodes at each network
* distance. This helps with load balancing when there is data skew.
*
* @param reader Node where data will be read
* @param nodes Available replicas with the requested data
* @param activeLen Number of active nodes at the front of the array
* @param secondarySort a secondary sorting strategy which can inject into
* that point from outside to help sort the same distance.
* @param <T> Generics Type T
*/
public <T extends Node> void sortByDistance(Node reader, T[] nodes,
int activeLen, Consumer<List<T>> secondarySort){
sortByDistance(reader, nodes, activeLen, secondarySort, false);
}
/**
* Sort nodes array by network distance to <i>reader</i> with secondary sort.
* <p> using network location. This is used when the reader
* is not a datanode. Sorting the nodes based on network distance
* from the reader reduces network traffic and improves
* performance.
* </p>
*
* @param reader Node where data will be read
* @param nodes Available replicas with the requested data
* @param activeLen Number of active nodes at the front of the array
*/
public void sortByDistanceUsingNetworkLocation(Node reader, Node[] nodes,
int activeLen) {
/*
* This method is called if the reader is not a datanode,
* so nonDataNodeReader flag is set to true.
*/
sortByDistanceUsingNetworkLocation(reader, nodes, activeLen, null);
}
/**
* Sort nodes array by network distance to <i>reader</i>.
* <p> using network location. This is used when the reader
* is not a datanode. Sorting the nodes based on network distance
* from the reader reduces network traffic and improves
* performance.
* </p>
*
* @param reader Node where data will be read
* @param nodes Available replicas with the requested data
* @param activeLen Number of active nodes at the front of the array
* @param secondarySort a secondary sorting strategy which can inject into
* that point from outside to help sort the same distance.
* @param <T> Generics Type T.
*/
public <T extends Node> void sortByDistanceUsingNetworkLocation(Node reader,
T[] nodes, int activeLen, Consumer<List<T>> secondarySort) {
sortByDistance(reader, nodes, activeLen, secondarySort, true);
}
/**
* Sort nodes array by network distance to <i>reader</i>.
* <p>
* As an additional twist, we also randomize the nodes at each network
* distance. This helps with load balancing when there is data skew.
* And it helps choose node with more fast storage type.
*
* @param reader Node where data will be read
* @param nodes Available replicas with the requested data
* @param activeLen Number of active nodes at the front of the array
* @param nonDataNodeReader True if the reader is not a datanode
*/
private <T extends Node> void sortByDistance(Node reader, T[] nodes,
int activeLen, Consumer<List<T>> secondarySort,
boolean nonDataNodeReader) {
/** Sort weights for the nodes array */
TreeMap<Integer, List<T>> weightedNodeTree =
new TreeMap<>();
int nWeight;
for (int i = 0; i < activeLen; i++) {
if (nonDataNodeReader) {
nWeight = getWeightUsingNetworkLocation(reader, nodes[i]);
} else {
nWeight = getWeight(reader, nodes[i]);
}
weightedNodeTree.computeIfAbsent(
nWeight, k -> new ArrayList<>(1)).add(nodes[i]);
}
int idx = 0;
// Sort nodes which have the same weight using secondarySort.
for (List<T> nodesList : weightedNodeTree.values()) {
Collections.shuffle(nodesList, getRandom());
if (secondarySort != null) {
// a secondary sort breaks the tie between nodes.
secondarySort.accept(nodesList);
}
for (T n : nodesList) {
nodes[idx++] = n;
}
}
Preconditions.checkState(idx == activeLen,
"Sorted the wrong number of nodes!");
}
  /**
   * Checks whether one scope is contained in the other scope.
   *
   * <p>NOTE(review): despite the parameter names, the implementation returns
   * {@code pScope.startsWith(cScope)} — i.e. true when the FIRST argument's
   * path equals or lies below the SECOND argument's path in the tree. The
   * callers rely on exactly this orientation:
   * {@code isChildScope(scope, excludedScope)} is true when the whole scope
   * is covered by the excluded scope.
   *
   * @param parentScope the scope tested for being contained
   * @param childScope the scope tested for containing {@code parentScope}
   * @return true if {@code parentScope} equals or is nested under
   *         {@code childScope}
   */
  protected static boolean isChildScope(final String parentScope,
      final String childScope) {
    // Append trailing separators so "/d1" cannot spuriously match "/d10".
    String pScope = parentScope.endsWith(NodeBase.PATH_SEPARATOR_STR) ?
        parentScope : parentScope + NodeBase.PATH_SEPARATOR_STR;
    String cScope = childScope.endsWith(NodeBase.PATH_SEPARATOR_STR) ?
        childScope : childScope + NodeBase.PATH_SEPARATOR_STR;
    return pScope.startsWith(cScope);
  }
/**
* Checks whether a node belongs to the scope.
* @param node the node to check.
* @param scope scope to check.
* @return true if node lies within the scope
*/
protected static boolean isNodeInScope(Node node, String scope) {
if (!scope.endsWith(NodeBase.PATH_SEPARATOR_STR)) {
scope += NodeBase.PATH_SEPARATOR_STR;
}
String nodeLocation = NodeBase.getPath(node) + NodeBase.PATH_SEPARATOR_STR;
return nodeLocation.startsWith(scope);
}
  /**
   * @return the number of racks currently holding at least one occupying
   *         node, i.e. total racks minus the tracked empty racks
   */
  public int getNumOfNonEmptyRacks() {
    return numOfRacks - numOfEmptyRacks;
  }
/**
* Update empty rack number when add a node like recommission.
* @param node node to be added; can be null
*/
public void recommissionNode(Node node) {
if (node == null) {
return;
}
if (node instanceof InnerNode) {
throw new IllegalArgumentException(
"Not allow to remove an inner node: " + NodeBase.getPath(node));
}
netlock.writeLock().lock();
try {
decommissionNodes.remove(node.getName());
interAddNodeWithEmptyRack(node);
} finally {
netlock.writeLock().unlock();
}
}
/**
* Update empty rack number when remove a node like decommission.
* @param node node to be added; can be null
*/
public void decommissionNode(Node node) {
if (node == null) {
return;
}
if (node instanceof InnerNode) {
throw new IllegalArgumentException(
"Not allow to remove an inner node: " + NodeBase.getPath(node));
}
netlock.writeLock().lock();
try {
decommissionNodes.add(node.getName());
interRemoveNodeWithEmptyRack(node);
} finally {
netlock.writeLock().unlock();
}
}
/**
* Internal function for update empty rack number
* for add or recommission a node.
* @param node node to be added; can be null
*/
private void interAddNodeWithEmptyRack(Node node) {
if (node == null) {
return;
}
String rackname = node.getNetworkLocation();
Set<String> nodes = rackMap.get(rackname);
if (nodes == null) {
nodes = new HashSet<>();
}
if (!decommissionNodes.contains(node.getName())) {
nodes.add(node.getName());
}
rackMap.put(rackname, nodes);
countEmptyRacks();
}
  /**
   * Internal helper: refresh the empty-rack count after a node has been
   * removed or decommissioned. Callers in this class invoke it under the
   * topology write lock.
   *
   * @param node node being removed; ignored when {@code null}
   */
  private void interRemoveNodeWithEmptyRack(Node node) {
    if (node == null) {
      return;
    }
    String rackname = node.getNetworkLocation();
    Set<String> nodes = rackMap.get(rackname);
    if (nodes != null) {
      InnerNode rack = (InnerNode) getNode(node.getNetworkLocation());
      if (rack == null) {
        // this node and its rack are both removed.
        rackMap.remove(rackname);
      } else if (nodes.contains(node.getName())) {
        // this node is decommissioned or removed.
        nodes.remove(node.getName());
        // NOTE(review): 'nodes' is the live set already stored under this key,
        // so this put is effectively a no-op; kept as-is.
        rackMap.put(rackname, nodes);
      }
      countEmptyRacks();
    }
  }
private void countEmptyRacks() {
int count = 0;
for (Set<String> nodes : rackMap.values()) {
if (nodes != null && nodes.isEmpty()) {
count++;
}
}
numOfEmptyRacks = count;
LOG.debug("Current numOfEmptyRacks is {}", numOfEmptyRacks);
}
/**
* Randomly permute the active nodes of the node array.
*
* @param nodes Available replicas with the requested data
* @param activeLen Number of active nodes at the front of the array
*/
public void shuffle(Node[] nodes, int activeLen) {
List<Node> list = new ArrayList<>(activeLen);
for (int i = 0; i < activeLen; i++) {
list.add(nodes[i]);
}
Collections.shuffle(list, getRandom());
for (int i = 0; i < activeLen; i++) {
nodes[i] = list.get(i);
}
}
}
| InvalidTopologyException |
java | quarkusio__quarkus | extensions/elytron-security-properties-file/deployment/src/test/java/io/quarkus/security/test/BasicAuthTestCase.java | {
"start": 373,
"end": 5423
} | class ____ {
    // Resources under test: a secured servlet plus several JAX-RS endpoints
    // covering class-level roles, parameterized paths and Principal exposure.
    // NOTE(review): raw Class[]; Class<?>[] would be cleaner — confirm before changing.
    static Class[] testClasses = {
            TestSecureServlet.class, TestApplication.class, RolesEndpointClassLevel.class,
            ParametrizedPathsResource.class, SubjectExposingResource.class
    };
    // Deployment using properties-file security: test-users.properties holds
    // the credentials, test-roles.properties the role assignments.
    @RegisterExtension
    static final QuarkusUnitTest config = new QuarkusUnitTest()
            .withApplicationRoot((jar) -> jar
                    .addClasses(testClasses)
                    .addAsResource("application.properties")
                    .addAsResource("test-users.properties")
                    .addAsResource("test-roles.properties"));
// Basic @ServletSecurity tests
@Test()
public void testSecureAccessFailure() {
RestAssured.when().get("/secure-test").then()
.statusCode(401);
}
@Test()
public void testSecureRoleFailure() {
RestAssured.given().auth().preemptive().basic("jdoe", "p4ssw0rd")
.when().get("/secure-test").then()
.statusCode(403);
}
@Test()
public void testSecureAccessSuccess() {
RestAssured.given().auth().preemptive().basic("stuart", "test")
.when().get("/secure-test").then()
.statusCode(200);
}
    /**
     * Accessing a secured JAX-RS resource without any authentication must
     * yield 401 with a Basic challenge in the WWW-Authenticate header.
     */
    @Test
    public void testJaxrsGetFailure() {
        RestAssured.when().get("/jaxrs-secured/rolesClass").then()
                .header("www-authenticate", containsStringIgnoringCase("basic"))
                .statusCode(401);
    }

    /**
     * Accessing a secured JAX-RS resource with valid credentials but without
     * the required role must yield 403.
     */
    @Test
    public void testJaxrsGetRoleFailure() {
        RestAssured.given().auth().preemptive().basic("jdoe", "p4ssw0rd")
                .when().get("/jaxrs-secured/rolesClass").then()
                .statusCode(403);
    }

    /**
     * Accessing a secured JAX-RS resource with valid credentials and the
     * required role must succeed with 200.
     */
    @Test
    public void testJaxrsGetRoleSuccess() {
        RestAssured.given().auth().preemptive().basic("scott", "jb0ss")
                .when().get("/jaxrs-secured/rolesClass").then()
                .statusCode(200);
    }

    /**
     * An authenticated admin may reach the admin-only parameterized path (200).
     */
    @Test
    public void testJaxrsPathAdminRoleSuccess() {
        RestAssured.given().auth().preemptive().basic("scott", "jb0ss")
                .when().get("/jaxrs-secured/parameterized-paths/my/banking/admin").then()
                .statusCode(200);
    }

    /** An authenticated non-admin is rejected from the admin-only path (403). */
    @Test
    public void testJaxrsPathAdminRoleFailure() {
        RestAssured.given().auth().preemptive().basic("noadmin", "n0Adm1n")
                .when().get("/jaxrs-secured/parameterized-paths/my/banking/admin").then()
                .statusCode(403);
    }
    /** A user with the view role may reach the view parameterized path (200). */
    @Test
    public void testJaxrsPathUserRoleSuccess() {
        RestAssured.given().auth().preemptive().basic("stuart", "test")
                .when().get("/jaxrs-secured/parameterized-paths/my/banking/view").then()
                .statusCode(200);
    }

    /** The secured subject resource echoes the authenticated user name. */
    @Test
    public void testJaxrsUserRoleSuccess() {
        RestAssured.given().auth().preemptive().basic("scott", "jb0ss")
                .when().get("/jaxrs-secured/subject/secured").then()
                .statusCode(200)
                .body(equalTo("scott"));
    }

    /** The injected Principal carries the authenticated user name. */
    @Test
    public void testJaxrsInjectedPrincipalSuccess() {
        RestAssured.given().auth().preemptive().basic("scott", "jb0ss")
                .when().get("/jaxrs-secured/subject/principalSecured").then()
                .statusCode(200)
                .body(equalTo("scott"));
    }

    /**
     * A @PermitAll resource is reachable anonymously; the reported principal
     * is "anonymous".
     */
    @Test
    public void testJaxrsGetPermitAll() {
        RestAssured.when().get("/jaxrs-secured/subject/unsecured").then()
                .statusCode(200)
                .body(equalTo("anonymous"));
    }

    /** A @DenyAll resource challenges anonymous callers with 401. */
    @Test
    public void testJaxrsGetDenyAllWithoutAuth() {
        RestAssured.when().get("/jaxrs-secured/subject/denied").then()
                .statusCode(401);
    }

    /** A @DenyAll resource rejects even authenticated callers with 403. */
    @Test
    public void testJaxrsGetDenyAllWithAuth() {
        RestAssured.given().auth().preemptive().basic("scott", "jb0ss")
                .when().get("/jaxrs-secured/subject/denied").then()
                .statusCode(403);
    }
}
| BasicAuthTestCase |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/TestTraceFilter.java | {
"start": 936,
"end": 1664
} | class ____ extends TestCase {
public void test_loop() throws Exception {
DruidDataSource dataSource = new DruidDataSource();
dataSource.setFilters("stat,trace");
dataSource.setUrl("jdbc:mock:");
JMXUtils.register("com.alibaba.dragoon:type=JdbcTraceManager", JdbcTraceManager.getInstance());
for (int i = 0; i < 1000; ++i) {
Connection conn = dataSource.getConnection();
Statement stmt = conn.createStatement();
ResultSet rs = stmt.executeQuery("SELECT 1");
rs.next();
rs.close();
stmt.close();
conn.close();
Thread.sleep(1000);
}
dataSource.close();
}
}
| TestTraceFilter |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/connector/source/abilities/SupportsDynamicFiltering.java | {
"start": 2749,
"end": 3565
} | interface ____ {
    /**
     * Returns the filter fields this partitioned table source supports. This
     * tells the planner which fields may be used as dynamic-filtering fields;
     * the planner picks a subset of the returned fields based on the query and
     * creates the dynamic-filtering operator accordingly.
     */
    List<String> listAcceptedFilterFields();

    /**
     * Applies the candidate filter fields to the table source. The data
     * corresponding to the filter fields is provided at runtime and can be
     * used to filter the partitions or the input data.
     *
     * <p>NOTE: the candidate filter fields are always taken from the result of
     * {@link #listAcceptedFilterFields()}.
     */
    void applyDynamicFiltering(List<String> candidateFilterFields);
}
| SupportsDynamicFiltering |
java | netty__netty | handler/src/main/java/io/netty/handler/traffic/GlobalTrafficShapingHandler.java | {
"start": 4084,
"end": 4641
} | class ____ extends AbstractTrafficShapingHandler {
    /**
     * All queues per channel, keyed by an Integer channel identifier —
     * presumably the channel's hash code; confirm at the put() call sites.
     */
    private final ConcurrentMap<Integer, PerChannel> channelQueues = new ConcurrentHashMap<>();
    /**
     * Global queues size: running total across every tracked channel.
     */
    private final AtomicLong queuesSize = new AtomicLong();
    /**
     * Max size in the list before proposing to stop writing new objects from next handlers
     * for all channel (global).
     * <p>NOTE(review): package-visible and non-volatile; cross-thread
     * visibility of updates is not guaranteed by this declaration alone —
     * confirm the intended publication model.
     */
    long maxGlobalWriteSize = DEFAULT_MAX_SIZE * 100; // default 400MB
private static final | GlobalTrafficShapingHandler |
java | spring-projects__spring-framework | spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/method/annotation/RequestMappingHandlerAdapter.java | {
"start": 7032,
"end": 44923
} | class ____ extends AbstractHandlerMethodAdapter
implements BeanFactoryAware, InitializingBean {
	/**
	 * MethodFilter that matches {@link InitBinder @InitBinder} methods.
	 */
	public static final MethodFilter INIT_BINDER_METHODS = method ->
			AnnotatedElementUtils.hasAnnotation(method, InitBinder.class);

	/**
	 * MethodFilter that matches {@link ModelAttribute @ModelAttribute} methods,
	 * excluding methods that are themselves {@link RequestMapping @RequestMapping}
	 * handler methods.
	 */
	public static final MethodFilter MODEL_ATTRIBUTE_METHODS = method ->
			(!AnnotatedElementUtils.hasAnnotation(method, RequestMapping.class) &&
					AnnotatedElementUtils.hasAnnotation(method, ModelAttribute.class));

	// Detects Bean Validation on the classpath; gates method-validation setup
	// in afterPropertiesSet().
	private static final boolean BEAN_VALIDATION_PRESENT =
			ClassUtils.isPresent("jakarta.validation.Validator", HandlerMethod.class.getClassLoader());
	// User-supplied resolvers appended after the built-in ones.
	private @Nullable List<HandlerMethodArgumentResolver> customArgumentResolvers;

	// Full resolver chain; assembled in afterPropertiesSet() when not set explicitly.
	private @Nullable HandlerMethodArgumentResolverComposite argumentResolvers;

	// Resolver chain used for @InitBinder methods only.
	private @Nullable HandlerMethodArgumentResolverComposite initBinderArgumentResolvers;

	// User-supplied handlers appended after the built-in ones.
	private @Nullable List<HandlerMethodReturnValueHandler> customReturnValueHandlers;

	// Full return-value handler chain; assembled in afterPropertiesSet() when not set.
	private @Nullable HandlerMethodReturnValueHandlerComposite returnValueHandlers;

	// Legacy ModelAndViewResolvers, invoked after all return-value handlers.
	private @Nullable List<ModelAndViewResolver> modelAndViewResolvers;

	private ContentNegotiationManager contentNegotiationManager = new ContentNegotiationManager();

	// Converters for body reading/writing; defaults applied in initMessageConverters().
	private final List<HttpMessageConverter<?>> messageConverters = new ArrayList<>();

	// RequestBodyAdvice/ResponseBodyAdvice registrations (plus discovered advice beans).
	private final List<Object> requestResponseBodyAdvice = new ArrayList<>();

	private @Nullable WebBindingInitializer webBindingInitializer;

	private final List<ErrorResponse.Interceptor> errorResponseInterceptors = new ArrayList<>();

	// Created in afterPropertiesSet() only when Bean Validation is present.
	private @Nullable MethodValidator methodValidator;

	// Executor for Callable return values; default is not meant for production load.
	private AsyncTaskExecutor taskExecutor = new MvcSimpleAsyncTaskExecutor();

	private @Nullable Long asyncRequestTimeout;

	private CallableProcessingInterceptor[] callableInterceptors = new CallableProcessingInterceptor[0];

	private DeferredResultProcessingInterceptor[] deferredResultInterceptors = new DeferredResultProcessingInterceptor[0];

	private ReactiveAdapterRegistry reactiveAdapterRegistry = ReactiveAdapterRegistry.getSharedInstance();

	// 0 => "Cache-Control: no-store" for @SessionAttributes handlers.
	private int cacheSecondsForSessionAttributeHandlers = 0;

	private boolean synchronizeOnSession = false;

	private SessionAttributeStore sessionAttributeStore = new DefaultSessionAttributeStore();

	private ParameterNameDiscoverer parameterNameDiscoverer = new DefaultParameterNameDiscoverer();

	private @Nullable ConfigurableBeanFactory beanFactory;

	// Per-handler-type and per-advice-bean caches populated lazily / at init time.
	private final Map<Class<?>, SessionAttributesHandler> sessionAttributesHandlerCache = new ConcurrentHashMap<>(64);

	private final Map<Class<?>, Set<Method>> initBinderCache = new ConcurrentHashMap<>(64);

	private final Map<ControllerAdviceBean, Set<Method>> initBinderAdviceCache = new LinkedHashMap<>();

	private final Map<Class<?>, Set<Method>> modelAttributeCache = new ConcurrentHashMap<>(64);

	private final Map<ControllerAdviceBean, Set<Method>> modelAttributeAdviceCache = new LinkedHashMap<>();
	/**
	 * Provide resolvers for custom argument types. Custom resolvers are ordered
	 * after the built-in ones. To override the built-in support for argument
	 * resolution entirely, use {@link #setArgumentResolvers} instead.
	 */
	public void setCustomArgumentResolvers(@Nullable List<HandlerMethodArgumentResolver> argumentResolvers) {
		this.customArgumentResolvers = argumentResolvers;
	}

	/**
	 * Return the custom argument resolvers, or {@code null} if none were set.
	 */
	public @Nullable List<HandlerMethodArgumentResolver> getCustomArgumentResolvers() {
		return this.customArgumentResolvers;
	}
/**
* Configure the complete list of supported argument types thus overriding
* the resolvers that would otherwise be configured by default.
*/
public void setArgumentResolvers(@Nullable List<HandlerMethodArgumentResolver> argumentResolvers) {
if (argumentResolvers == null) {
this.argumentResolvers = null;
}
else {
this.argumentResolvers = new HandlerMethodArgumentResolverComposite();
this.argumentResolvers.addResolvers(argumentResolvers);
}
}
	/**
	 * Return the configured argument resolvers, or possibly {@code null} if
	 * not yet initialized via {@link #afterPropertiesSet()}.
	 */
	public @Nullable List<HandlerMethodArgumentResolver> getArgumentResolvers() {
		return (this.argumentResolvers != null ? this.argumentResolvers.getResolvers() : null);
	}
/**
* Configure the supported argument types in {@code @InitBinder} methods.
*/
public void setInitBinderArgumentResolvers(@Nullable List<HandlerMethodArgumentResolver> argumentResolvers) {
if (argumentResolvers == null) {
this.initBinderArgumentResolvers = null;
}
else {
this.initBinderArgumentResolvers = new HandlerMethodArgumentResolverComposite();
this.initBinderArgumentResolvers.addResolvers(argumentResolvers);
}
}
	/**
	 * Return the argument resolvers for {@code @InitBinder} methods, or possibly
	 * {@code null} if not yet initialized via {@link #afterPropertiesSet()}.
	 */
	public @Nullable List<HandlerMethodArgumentResolver> getInitBinderArgumentResolvers() {
		return (this.initBinderArgumentResolvers != null ? this.initBinderArgumentResolvers.getResolvers() : null);
	}

	/**
	 * Provide handlers for custom return value types. Custom handlers are
	 * ordered after the built-in ones. To override the built-in support for
	 * return value handling entirely, use {@link #setReturnValueHandlers}.
	 */
	public void setCustomReturnValueHandlers(@Nullable List<HandlerMethodReturnValueHandler> returnValueHandlers) {
		this.customReturnValueHandlers = returnValueHandlers;
	}

	/**
	 * Return the custom return value handlers, or {@code null} if none were set.
	 */
	public @Nullable List<HandlerMethodReturnValueHandler> getCustomReturnValueHandlers() {
		return this.customReturnValueHandlers;
	}
/**
* Configure the complete list of supported return value types thus
* overriding handlers that would otherwise be configured by default.
*/
public void setReturnValueHandlers(@Nullable List<HandlerMethodReturnValueHandler> returnValueHandlers) {
if (returnValueHandlers == null) {
this.returnValueHandlers = null;
}
else {
this.returnValueHandlers = new HandlerMethodReturnValueHandlerComposite();
this.returnValueHandlers.addHandlers(returnValueHandlers);
}
}
	/**
	 * Return the configured handlers, or possibly {@code null} if not yet
	 * initialized via {@link #afterPropertiesSet()}.
	 */
	public @Nullable List<HandlerMethodReturnValueHandler> getReturnValueHandlers() {
		return (this.returnValueHandlers != null ? this.returnValueHandlers.getHandlers() : null);
	}
	/**
	 * Provide custom {@link ModelAndViewResolver ModelAndViewResolvers}.
	 * <p><strong>Note:</strong> This method is available for backwards
	 * compatibility only. However, it is recommended to re-write a
	 * {@code ModelAndViewResolver} as {@link HandlerMethodReturnValueHandler}.
	 * An adapter between the two interfaces is not possible since the
	 * {@link HandlerMethodReturnValueHandler#supportsReturnType} method
	 * cannot be implemented. Hence {@code ModelAndViewResolver}s are limited
	 * to always being invoked at the end, after all other return value
	 * handlers have been given a chance.
	 * <p>A {@code HandlerMethodReturnValueHandler} provides better access to
	 * the return type and controller method information and can be ordered
	 * freely relative to other return value handlers.
	 */
	public void setModelAndViewResolvers(@Nullable List<ModelAndViewResolver> modelAndViewResolvers) {
		this.modelAndViewResolvers = modelAndViewResolvers;
	}

	/**
	 * Return the configured {@link ModelAndViewResolver ModelAndViewResolvers}, or {@code null}.
	 */
	public @Nullable List<ModelAndViewResolver> getModelAndViewResolvers() {
		return this.modelAndViewResolvers;
	}

	/**
	 * Set the {@link ContentNegotiationManager} to use to determine requested media types.
	 * If not set, the default constructor is used.
	 */
	public void setContentNegotiationManager(ContentNegotiationManager contentNegotiationManager) {
		this.contentNegotiationManager = contentNegotiationManager;
	}

	/**
	 * Provide the converters to use in argument resolvers and return value
	 * handlers that support reading and/or writing to the body of the
	 * request and response. Replaces any previously configured converters.
	 */
	public void setMessageConverters(List<HttpMessageConverter<?>> messageConverters) {
		this.messageConverters.clear();
		this.messageConverters.addAll(messageConverters);
	}

	/**
	 * Return the configured message body converters. Note this is the live,
	 * mutable list used internally.
	 */
	public List<HttpMessageConverter<?>> getMessageConverters() {
		return this.messageConverters;
	}
	/**
	 * Add one or more {@code RequestBodyAdvice} instances to intercept the
	 * request before it is read and converted for {@code @RequestBody} and
	 * {@code HttpEntity} method arguments.
	 */
	public void setRequestBodyAdvice(@Nullable List<RequestBodyAdvice> requestBodyAdvice) {
		// Additive: appends to previously registered advice rather than replacing it.
		if (requestBodyAdvice != null) {
			this.requestResponseBodyAdvice.addAll(requestBodyAdvice);
		}
	}

	/**
	 * Add one or more {@code ResponseBodyAdvice} instances to intercept the
	 * response before {@code @ResponseBody} or {@code ResponseEntity} return
	 * values are written to the response body.
	 */
	public void setResponseBodyAdvice(@Nullable List<ResponseBodyAdvice<?>> responseBodyAdvice) {
		// Additive: appends to previously registered advice rather than replacing it.
		if (responseBodyAdvice != null) {
			this.requestResponseBodyAdvice.addAll(responseBodyAdvice);
		}
	}

	/**
	 * Provide a WebBindingInitializer with "global" initialization to apply
	 * to every DataBinder instance.
	 */
	public void setWebBindingInitializer(@Nullable WebBindingInitializer webBindingInitializer) {
		this.webBindingInitializer = webBindingInitializer;
	}

	/**
	 * Return the configured WebBindingInitializer, or {@code null} if none.
	 */
	public @Nullable WebBindingInitializer getWebBindingInitializer() {
		return this.webBindingInitializer;
	}

	/**
	 * Configure a list of {@link ErrorResponse.Interceptor}'s to apply when
	 * rendering an RFC 9457 {@link org.springframework.http.ProblemDetail}
	 * error response. Replaces any previously configured interceptors.
	 * @param interceptors the interceptors to use
	 * @since 6.2
	 */
	public void setErrorResponseInterceptors(List<ErrorResponse.Interceptor> interceptors) {
		this.errorResponseInterceptors.clear();
		this.errorResponseInterceptors.addAll(interceptors);
	}

	/**
	 * Return the {@link #setErrorResponseInterceptors(List) configured}
	 * {@link ErrorResponse.Interceptor}'s.
	 * @since 6.2
	 */
	public List<ErrorResponse.Interceptor> getErrorResponseInterceptors() {
		return this.errorResponseInterceptors;
	}
	/**
	 * Set the default {@link AsyncTaskExecutor} to use when a controller method
	 * returns a {@link Callable}. Controller methods can override this default on
	 * a per-request basis by returning a {@link WebAsyncTask}.
	 * <p>If your application has controllers with such return types, please
	 * configure an {@link AsyncTaskExecutor}, as the one used by default is not
	 * suitable for production under load.
	 */
	public void setTaskExecutor(AsyncTaskExecutor taskExecutor) {
		this.taskExecutor = taskExecutor;
	}

	/**
	 * Specify the amount of time, in milliseconds, before concurrent handling
	 * should time out. In Servlet 3, the timeout begins after the main request
	 * processing thread has exited and ends when the request is dispatched again
	 * for further processing of the concurrently produced result.
	 * <p>If this value is not set, the default timeout of the underlying
	 * implementation is used.
	 * @param timeout the timeout value in milliseconds
	 */
	public void setAsyncRequestTimeout(long timeout) {
		this.asyncRequestTimeout = timeout;
	}

	/**
	 * Configure {@code CallableProcessingInterceptor}'s to register on async requests.
	 * @param interceptors the interceptors to register
	 */
	public void setCallableInterceptors(List<CallableProcessingInterceptor> interceptors) {
		this.callableInterceptors = interceptors.toArray(new CallableProcessingInterceptor[0]);
	}

	/**
	 * Configure {@code DeferredResultProcessingInterceptor}'s to register on async requests.
	 * @param interceptors the interceptors to register
	 */
	public void setDeferredResultInterceptors(List<DeferredResultProcessingInterceptor> interceptors) {
		this.deferredResultInterceptors = interceptors.toArray(new DeferredResultProcessingInterceptor[0]);
	}
	/**
	 * Configure the registry for reactive library types to be supported as
	 * return values from controller methods.
	 * @since 5.0.5
	 */
	public void setReactiveAdapterRegistry(ReactiveAdapterRegistry reactiveAdapterRegistry) {
		this.reactiveAdapterRegistry = reactiveAdapterRegistry;
	}

	/**
	 * Return the configured reactive type registry of adapters.
	 * @since 5.0
	 */
	public ReactiveAdapterRegistry getReactiveAdapterRegistry() {
		return this.reactiveAdapterRegistry;
	}

	/**
	 * Specify the strategy to store session attributes with. The default is
	 * {@link org.springframework.web.bind.support.DefaultSessionAttributeStore},
	 * storing session attributes in the HttpSession with the same attribute
	 * name as in the model.
	 */
	public void setSessionAttributeStore(SessionAttributeStore sessionAttributeStore) {
		this.sessionAttributeStore = sessionAttributeStore;
	}

	/**
	 * Cache content produced by {@code @SessionAttributes} annotated handlers
	 * for the given number of seconds.
	 * <p>Possible values are:
	 * <ul>
	 * <li>-1: no generation of cache-related headers</li>
	 * <li>0 (default value): "Cache-Control: no-store" will prevent caching</li>
	 * <li>1 or higher: "Cache-Control: max-age=seconds" will ask to cache content;
	 * not advised when dealing with session attributes</li>
	 * </ul>
	 * <p>In contrast to the "cacheSeconds" property which will apply to all general
	 * handlers (but not to {@code @SessionAttributes} annotated handlers),
	 * this setting will apply to {@code @SessionAttributes} handlers only.
	 * @see #setCacheSeconds
	 * @see org.springframework.web.bind.annotation.SessionAttributes
	 */
	public void setCacheSecondsForSessionAttributeHandlers(int cacheSecondsForSessionAttributeHandlers) {
		this.cacheSecondsForSessionAttributeHandlers = cacheSecondsForSessionAttributeHandlers;
	}

	/**
	 * Set if controller execution should be synchronized on the session,
	 * to serialize parallel invocations from the same client.
	 * <p>More specifically, the execution of the {@code handleRequestInternal}
	 * method will get synchronized if this flag is "true". The best available
	 * session mutex will be used for the synchronization; ideally, this will
	 * be a mutex exposed by HttpSessionMutexListener.
	 * <p>The session mutex is guaranteed to be the same object during
	 * the entire lifetime of the session, available under the key defined
	 * by the {@code SESSION_MUTEX_ATTRIBUTE} constant. It serves as a
	 * safe reference to synchronize on for locking on the current session.
	 * <p>In many cases, the HttpSession reference itself is a safe mutex
	 * as well, since it will always be the same object reference for the
	 * same active logical session. However, this is not guaranteed across
	 * different servlet containers; the only 100% safe way is a session mutex.
	 * @see org.springframework.web.util.HttpSessionMutexListener
	 * @see org.springframework.web.util.WebUtils#getSessionMutex(jakarta.servlet.http.HttpSession)
	 */
	public void setSynchronizeOnSession(boolean synchronizeOnSession) {
		this.synchronizeOnSession = synchronizeOnSession;
	}

	/**
	 * Set the ParameterNameDiscoverer to use for resolving method parameter names if needed
	 * (for example, for default attribute names).
	 * <p>Default is a {@link org.springframework.core.DefaultParameterNameDiscoverer}.
	 */
	public void setParameterNameDiscoverer(ParameterNameDiscoverer parameterNameDiscoverer) {
		this.parameterNameDiscoverer = parameterNameDiscoverer;
	}

	/**
	 * A {@link ConfigurableBeanFactory} is expected for resolving expressions
	 * in method argument default values; any other factory type is ignored.
	 */
	@Override
	public void setBeanFactory(BeanFactory beanFactory) {
		if (beanFactory instanceof ConfigurableBeanFactory cbf) {
			this.beanFactory = cbf;
		}
	}

	/**
	 * Return the owning factory of this bean instance, or {@code null} if none.
	 */
	protected @Nullable ConfigurableBeanFactory getBeanFactory() {
		return this.beanFactory;
	}
	@Override
	public void afterPropertiesSet() {
		// Must run first: @ControllerAdvice beans may contribute Request/ResponseBodyAdvice
		// instances that the default resolvers and handlers built below capture.
		initControllerAdviceCache();
		initMessageConverters();
		// Only assemble default chains for what has not been configured explicitly.
		if (this.argumentResolvers == null) {
			List<HandlerMethodArgumentResolver> resolvers = getDefaultArgumentResolvers();
			this.argumentResolvers = new HandlerMethodArgumentResolverComposite().addResolvers(resolvers);
		}
		if (this.initBinderArgumentResolvers == null) {
			List<HandlerMethodArgumentResolver> resolvers = getDefaultInitBinderArgumentResolvers();
			this.initBinderArgumentResolvers = new HandlerMethodArgumentResolverComposite().addResolvers(resolvers);
		}
		if (this.returnValueHandlers == null) {
			List<HandlerMethodReturnValueHandler> handlers = getDefaultReturnValueHandlers();
			this.returnValueHandlers = new HandlerMethodReturnValueHandlerComposite().addHandlers(handlers);
		}
		if (BEAN_VALIDATION_PRESENT) {
			// Method validation is enabled only when Bean Validation is on the classpath.
			List<HandlerMethodArgumentResolver> resolvers = this.argumentResolvers.getResolvers();
			this.methodValidator = HandlerMethodValidator.from(
					this.webBindingInitializer, this.parameterNameDiscoverer,
					methodParamPredicate(resolvers, ModelAttributeMethodProcessor.class),
					methodParamPredicate(resolvers, RequestParamMethodArgumentResolver.class));
		}
	}
private void initMessageConverters() {
if (!this.messageConverters.isEmpty()) {
return;
}
this.messageConverters.add(new ByteArrayHttpMessageConverter());
this.messageConverters.add(new StringHttpMessageConverter());
this.messageConverters.add(new AllEncompassingFormHttpMessageConverter());
}
	/**
	 * Discover {@code @ControllerAdvice} beans and cache their
	 * {@code @ModelAttribute} and {@code @InitBinder} methods, plus any advice
	 * beans implementing Request/ResponseBodyAdvice.
	 */
	private void initControllerAdviceCache() {
		if (getApplicationContext() == null) {
			return;
		}
		List<ControllerAdviceBean> adviceBeans = ControllerAdviceBean.findAnnotatedBeans(getApplicationContext());
		List<Object> requestResponseBodyAdviceBeans = new ArrayList<>();
		for (ControllerAdviceBean adviceBean : adviceBeans) {
			Class<?> beanType = adviceBean.getBeanType();
			if (beanType == null) {
				throw new IllegalStateException("Unresolvable type for ControllerAdviceBean: " + adviceBean);
			}
			Set<Method> attrMethods = MethodIntrospector.selectMethods(beanType, MODEL_ATTRIBUTE_METHODS);
			if (!attrMethods.isEmpty()) {
				this.modelAttributeAdviceCache.put(adviceBean, attrMethods);
			}
			Set<Method> binderMethods = MethodIntrospector.selectMethods(beanType, INIT_BINDER_METHODS);
			if (!binderMethods.isEmpty()) {
				this.initBinderAdviceCache.put(adviceBean, binderMethods);
			}
			if (RequestBodyAdvice.class.isAssignableFrom(beanType) || ResponseBodyAdvice.class.isAssignableFrom(beanType)) {
				requestResponseBodyAdviceBeans.add(adviceBean);
			}
		}
		if (!requestResponseBodyAdviceBeans.isEmpty()) {
			// Inserted at index 0, ahead of any explicitly registered advice.
			this.requestResponseBodyAdvice.addAll(0, requestResponseBodyAdviceBeans);
		}
		if (logger.isDebugEnabled()) {
			// Summarize what was discovered, for startup diagnostics only.
			int modelSize = this.modelAttributeAdviceCache.size();
			int binderSize = this.initBinderAdviceCache.size();
			int reqCount = getBodyAdviceCount(RequestBodyAdvice.class);
			int resCount = getBodyAdviceCount(ResponseBodyAdvice.class);
			if (modelSize == 0 && binderSize == 0 && reqCount == 0 && resCount == 0) {
				logger.debug("ControllerAdvice beans: none");
			}
			else {
				logger.debug("ControllerAdvice beans: " + modelSize + " @ModelAttribute, " + binderSize +
						" @InitBinder, " + reqCount + " RequestBodyAdvice, " + resCount + " ResponseBodyAdvice");
			}
		}
	}
// Count all advice, including explicit registrations..
private int getBodyAdviceCount(Class<?> adviceType) {
List<Object> advice = this.requestResponseBodyAdvice;
return RequestBodyAdvice.class.isAssignableFrom(adviceType) ?
RequestResponseBodyAdviceChain.getAdviceByType(advice, RequestBodyAdvice.class).size() :
RequestResponseBodyAdviceChain.getAdviceByType(advice, ResponseBodyAdvice.class).size();
}
	/**
	 * Return the list of argument resolvers to use, including built-in resolvers
	 * and custom resolvers provided via {@link #setCustomArgumentResolvers}.
	 * <p>Ordering is significant: resolvers are consulted in list order, so
	 * the catch-all resolvers are deliberately added last.
	 */
	private List<HandlerMethodArgumentResolver> getDefaultArgumentResolvers() {
		List<HandlerMethodArgumentResolver> resolvers = new ArrayList<>(30);

		// Annotation-based argument resolution
		resolvers.add(new RequestParamMethodArgumentResolver(getBeanFactory(), false));
		resolvers.add(new RequestParamMapMethodArgumentResolver());
		resolvers.add(new PathVariableMethodArgumentResolver());
		resolvers.add(new PathVariableMapMethodArgumentResolver());
		resolvers.add(new MatrixVariableMethodArgumentResolver());
		resolvers.add(new MatrixVariableMapMethodArgumentResolver());
		resolvers.add(new ServletModelAttributeMethodProcessor(false));
		resolvers.add(new RequestResponseBodyMethodProcessor(getMessageConverters(), this.requestResponseBodyAdvice));
		resolvers.add(new RequestPartMethodArgumentResolver(getMessageConverters(), this.requestResponseBodyAdvice));
		resolvers.add(new RequestHeaderMethodArgumentResolver(getBeanFactory()));
		resolvers.add(new RequestHeaderMapMethodArgumentResolver());
		resolvers.add(new ServletCookieValueMethodArgumentResolver(getBeanFactory()));
		resolvers.add(new ExpressionValueMethodArgumentResolver(getBeanFactory()));
		resolvers.add(new SessionAttributeMethodArgumentResolver());
		resolvers.add(new RequestAttributeMethodArgumentResolver());

		// Type-based argument resolution
		resolvers.add(new ServletRequestMethodArgumentResolver());
		resolvers.add(new ServletResponseMethodArgumentResolver());
		resolvers.add(new HttpEntityMethodProcessor(getMessageConverters(), this.requestResponseBodyAdvice));
		resolvers.add(new RedirectAttributesMethodArgumentResolver());
		resolvers.add(new ModelMethodProcessor());
		resolvers.add(new MapMethodProcessor());
		resolvers.add(new ErrorsMethodArgumentResolver());
		resolvers.add(new SessionStatusMethodArgumentResolver());
		resolvers.add(new UriComponentsBuilderMethodArgumentResolver());
		resolvers.add(new ApiVersionMethodArgumentResolver());
		if (KotlinDetector.isKotlinPresent()) {
			resolvers.add(new ContinuationHandlerMethodArgumentResolver());
		}

		// Custom arguments (after built-ins, before the catch-alls)
		if (getCustomArgumentResolvers() != null) {
			resolvers.addAll(getCustomArgumentResolvers());
		}

		// Catch-all
		resolvers.add(new PrincipalMethodArgumentResolver());
		resolvers.add(new RequestParamMethodArgumentResolver(getBeanFactory(), true));
		resolvers.add(new ServletModelAttributeMethodProcessor(true));

		return resolvers;
	}
/**
 * Builds the resolver chain applied to arguments of {@code @InitBinder}
 * methods: built-in resolvers first, then any custom ones, then catch-all
 * fallbacks. Registration order determines precedence.
 */
private List<HandlerMethodArgumentResolver> getDefaultInitBinderArgumentResolvers() {
    List<HandlerMethodArgumentResolver> result = new ArrayList<>(20);

    // Resolvers driven by parameter annotations
    result.add(new RequestParamMethodArgumentResolver(getBeanFactory(), false));
    result.add(new RequestParamMapMethodArgumentResolver());
    result.add(new PathVariableMethodArgumentResolver());
    result.add(new PathVariableMapMethodArgumentResolver());
    result.add(new MatrixVariableMethodArgumentResolver());
    result.add(new MatrixVariableMapMethodArgumentResolver());
    result.add(new ExpressionValueMethodArgumentResolver(getBeanFactory()));
    result.add(new SessionAttributeMethodArgumentResolver());
    result.add(new RequestAttributeMethodArgumentResolver());

    // Resolvers driven by the declared parameter type
    result.add(new ServletRequestMethodArgumentResolver());
    result.add(new ServletResponseMethodArgumentResolver());

    // User-supplied resolvers, if any were configured
    List<HandlerMethodArgumentResolver> custom = getCustomArgumentResolvers();
    if (custom != null) {
        result.addAll(custom);
    }

    // Fallbacks that accept anything not claimed above
    result.add(new PrincipalMethodArgumentResolver());
    result.add(new RequestParamMethodArgumentResolver(getBeanFactory(), true));
    return result;
}
/**
 * Assembles the chain of return value handlers: single-purpose handlers first,
 * then annotation-driven ones, multi-purpose ones, user-supplied handlers, and
 * finally a catch-all. Registration order determines precedence.
 */
private List<HandlerMethodReturnValueHandler> getDefaultReturnValueHandlers() {
    List<HandlerMethodReturnValueHandler> result = new ArrayList<>(20);

    // Shared instances: the emitter handler is reused by the ResponseEntity handler below.
    ResponseBodyEmitterReturnValueHandler emitterHandler =
            new ResponseBodyEmitterReturnValueHandler(getMessageConverters(),
                    this.reactiveAdapterRegistry, this.taskExecutor, this.contentNegotiationManager,
                    initViewResolvers(), initLocaleResolver());
    HttpEntityMethodProcessor entityProcessor = new HttpEntityMethodProcessor(getMessageConverters(),
            this.contentNegotiationManager, this.requestResponseBodyAdvice, this.errorResponseInterceptors);

    // Handlers for one specific return type each
    result.add(new ModelAndViewMethodReturnValueHandler());
    result.add(new ModelMethodProcessor());
    result.add(new ViewMethodReturnValueHandler());
    result.add(emitterHandler);
    result.add(new StreamingResponseBodyReturnValueHandler());
    result.add(new ResponseEntityReturnValueHandler(entityProcessor, emitterHandler));
    result.add(new HttpHeadersReturnValueHandler());
    result.add(new CallableMethodReturnValueHandler());
    result.add(new DeferredResultMethodReturnValueHandler());
    result.add(new AsyncTaskMethodReturnValueHandler(this.beanFactory));

    // Handlers selected by annotations on the handler method
    result.add(new ServletModelAttributeMethodProcessor(false));
    result.add(new RequestResponseBodyMethodProcessor(getMessageConverters(),
            this.contentNegotiationManager, this.requestResponseBodyAdvice, this.errorResponseInterceptors));

    // Handlers applicable to several return types
    result.add(new ViewNameMethodReturnValueHandler());
    result.add(new MapMethodProcessor());

    // User-supplied handlers, if any were configured
    List<HandlerMethodReturnValueHandler> custom = getCustomReturnValueHandlers();
    if (custom != null) {
        result.addAll(custom);
    }

    // Catch-all: legacy ModelAndView resolvers take precedence when configured
    if (CollectionUtils.isEmpty(getModelAndViewResolvers())) {
        result.add(new ServletModelAttributeMethodProcessor(true));
    }
    else {
        result.add(new ModelAndViewResolverMethodReturnValueHandler(getModelAndViewResolvers()));
    }
    return result;
}
/**
 * Looks up all {@link ViewResolver} beans (including ancestor contexts) and
 * returns them sorted by their order annotation/interface; returns an empty
 * list when the bean factory is not listable or no resolvers are registered.
 */
private List<ViewResolver> initViewResolvers() {
    if (!(getBeanFactory() instanceof ListableBeanFactory lbf)) {
        return Collections.emptyList();
    }
    Map<String, ViewResolver> beans =
            BeanFactoryUtils.beansOfTypeIncludingAncestors(lbf, ViewResolver.class, true, false);
    if (beans.isEmpty()) {
        return Collections.emptyList();
    }
    List<ViewResolver> resolvers = new ArrayList<>(beans.values());
    AnnotationAwareOrderComparator.sort(resolvers);
    return resolvers;
}
/**
 * Fetches the {@link LocaleResolver} registered under the DispatcherServlet's
 * well-known bean name, or returns {@code null} when no bean factory is set
 * or no such bean exists (absence is a normal, expected condition here).
 */
private @Nullable LocaleResolver initLocaleResolver() {
    var beanFactory = getBeanFactory();
    if (beanFactory == null) {
        return null;
    }
    try {
        return beanFactory.getBean(
                DispatcherServlet.LOCALE_RESOLVER_BEAN_NAME, LocaleResolver.class);
    }
    catch (NoSuchBeanDefinitionException ignored) {
        return null;
    }
}
/**
 * Returns a predicate that is {@code true} when the FIRST resolver claiming
 * the parameter (via {@code supportsParameter}) is an instance of the given
 * resolver type, and {@code false} when no resolver claims it at all.
 */
private static Predicate<MethodParameter> methodParamPredicate(
        List<HandlerMethodArgumentResolver> resolvers, Class<?> resolverType) {

    return parameter -> resolvers.stream()
            .filter(resolver -> resolver.supportsParameter(parameter))
            .findFirst()
            .map(resolverType::isInstance)
            .orElse(false);
}
/**
 * Always return {@code true} since any method argument and return value
 * type will be processed in some way. A method argument not recognized
 * by any HandlerMethodArgumentResolver is interpreted as a request parameter
 * if it is a simple type, or as a model attribute otherwise. A return value
 * not recognized by any HandlerMethodReturnValueHandler will be interpreted
 * as a model attribute.
 * @param handlerMethod the handler method to check (ignored)
 * @return always {@code true}
 */
@Override
protected boolean supportsInternal(HandlerMethod handlerMethod) {
return true;
}
/**
 * Handle the request by invoking the handler method, optionally serializing
 * invocation on the HTTP session, then apply cache headers to the response.
 * @param request current HTTP request
 * @param response current HTTP response
 * @param handlerMethod the handler method to invoke
 * @return a ModelAndView for view resolution, or {@code null} if handled directly
 */
@Override
protected @Nullable ModelAndView handleInternal(HttpServletRequest request,
HttpServletResponse response, HandlerMethod handlerMethod) throws Exception {
ModelAndView mav;
// Validate supported HTTP methods / session requirements before invocation.
checkRequest(request);
// Execute invokeHandlerMethod in synchronized block if required.
if (this.synchronizeOnSession) {
HttpSession session = request.getSession(false);
if (session != null) {
// Lock on a session-scoped mutex (not the session object itself) so that
// concurrent requests from the same client are serialized.
Object mutex = WebUtils.getSessionMutex(session);
synchronized (mutex) {
mav = invokeHandlerMethod(request, response, handlerMethod);
}
}
else {
// No HttpSession available -> no mutex necessary
mav = invokeHandlerMethod(request, response, handlerMethod);
}
}
else {
// No synchronization on session demanded at all...
mav = invokeHandlerMethod(request, response, handlerMethod);
}
// Only touch cache headers if the handler did not set Cache-Control itself.
if (!response.containsHeader(HEADER_CACHE_CONTROL)) {
if (getSessionAttributesHandler(handlerMethod).hasSessionAttributes()) {
// Handlers with @SessionAttributes get the dedicated cache period.
applyCacheSeconds(response, this.cacheSecondsForSessionAttributeHandlers);
}
else {
prepareResponse(response);
}
}
return mav;
}
/**
 * Return the {@link SessionAttributesHandler} instance for the given handler type
 * (never {@code null}).
 * <p>Instances are cached per handler bean type via {@code computeIfAbsent}
 * (atomic when the cache field — declared outside this view — is a concurrent
 * map; TODO confirm).
 */
private SessionAttributesHandler getSessionAttributesHandler(HandlerMethod handlerMethod) {
return this.sessionAttributesHandlerCache.computeIfAbsent(
handlerMethod.getBeanType(),
type -> new SessionAttributesHandler(type, this.sessionAttributeStore));
}
/**
 * Invoke the {@link RequestMapping} handler method preparing a {@link ModelAndView}
 * if view resolution is required.
 * <p>Also detects resumption after asynchronous processing and, in that case,
 * replays the saved result instead of re-invoking the handler.
 * @since 4.2
 * @see #createInvocableHandlerMethod(HandlerMethod)
 */
protected @Nullable ModelAndView invokeHandlerMethod(HttpServletRequest request,
HttpServletResponse response, HandlerMethod handlerMethod) throws Exception {
// Configure async support for this request: timeout, executor, interceptors.
WebAsyncManager asyncManager = WebAsyncUtils.getAsyncManager(request);
AsyncWebRequest asyncWebRequest = WebAsyncUtils.createAsyncWebRequest(request, response);
asyncWebRequest.setTimeout(this.asyncRequestTimeout);
asyncManager.setTaskExecutor(this.taskExecutor);
asyncManager.setAsyncWebRequest(asyncWebRequest);
asyncManager.registerCallableInterceptors(this.callableInterceptors);
asyncManager.registerDeferredResultInterceptors(this.deferredResultInterceptors);
// Obtain wrapped response to enforce lifecycle rule from Servlet spec, section 2.3.3.4
response = asyncWebRequest.getNativeResponse(HttpServletResponse.class);
ServletWebRequest webRequest = (asyncWebRequest instanceof ServletWebRequest ?
(ServletWebRequest) asyncWebRequest : new ServletWebRequest(request, response));
// Build invocation infrastructure: data binding, model methods, resolvers, handlers.
WebDataBinderFactory binderFactory = getDataBinderFactory(handlerMethod);
ModelFactory modelFactory = getModelFactory(handlerMethod, binderFactory);
ServletInvocableHandlerMethod invocableMethod = createInvocableHandlerMethod(handlerMethod);
if (this.argumentResolvers != null) {
invocableMethod.setHandlerMethodArgumentResolvers(this.argumentResolvers);
}
if (this.returnValueHandlers != null) {
invocableMethod.setHandlerMethodReturnValueHandlers(this.returnValueHandlers);
}
invocableMethod.setDataBinderFactory(binderFactory);
invocableMethod.setParameterNameDiscoverer(this.parameterNameDiscoverer);
invocableMethod.setMethodValidator(this.methodValidator);
// Populate the model: redirect flash attributes first, then @ModelAttribute methods.
ModelAndViewContainer mavContainer = new ModelAndViewContainer();
mavContainer.addAllAttributes(RequestContextUtils.getInputFlashMap(request));
modelFactory.initModel(webRequest, mavContainer, invocableMethod);
// Resuming after async completion: restore the saved container and wrap the
// handler so invokeAndHandle processes the concurrent result directly.
if (asyncManager.hasConcurrentResult()) {
Object result = asyncManager.getConcurrentResult();
Object[] resultContext = asyncManager.getConcurrentResultContext();
Assert.state(resultContext != null && resultContext.length > 0, "Missing result context");
mavContainer = (ModelAndViewContainer) resultContext[0];
asyncManager.clearConcurrentResult();
LogFormatUtils.traceDebug(logger, traceOn -> {
String formatted = LogFormatUtils.formatValue(result, !traceOn);
return "Resume with async result [" + formatted + "]";
});
invocableMethod = invocableMethod.wrapConcurrentResult(result);
}
invocableMethod.invokeAndHandle(webRequest, mavContainer);
// Async processing started during invocation: response completes later.
if (asyncManager.isConcurrentHandlingStarted()) {
return null;
}
return getModelAndView(mavContainer, modelFactory, webRequest);
}
/**
 * Create a {@link ServletInvocableHandlerMethod} from the given {@link HandlerMethod} definition.
 * <p>Protected as an extension point so subclasses may return a custom subclass.
 * @param handlerMethod the {@link HandlerMethod} definition
 * @return the corresponding {@link ServletInvocableHandlerMethod} (or custom subclass thereof)
 * @since 4.2
 */
protected ServletInvocableHandlerMethod createInvocableHandlerMethod(HandlerMethod handlerMethod) {
return new ServletInvocableHandlerMethod(handlerMethod);
}
/**
 * Create the {@link ModelFactory} for the given handler: collects applicable
 * {@code @ModelAttribute} methods from {@code @ControllerAdvice} beans first,
 * then from the handler's own class, caching the per-type method scan.
 */
private ModelFactory getModelFactory(HandlerMethod handlerMethod, WebDataBinderFactory binderFactory) {
SessionAttributesHandler sessionAttrHandler = getSessionAttributesHandler(handlerMethod);
Class<?> handlerType = handlerMethod.getBeanType();
// Check-then-put race is acceptable: the method scan is idempotent
// (assumes the cache field, declared outside this view, is concurrent — TODO confirm).
Set<Method> methods = this.modelAttributeCache.get(handlerType);
if (methods == null) {
methods = MethodIntrospector.selectMethods(handlerType, MODEL_ATTRIBUTE_METHODS);
this.modelAttributeCache.put(handlerType, methods);
}
List<InvocableHandlerMethod> attrMethods = new ArrayList<>();
// Global methods first
this.modelAttributeAdviceCache.forEach((controllerAdviceBean, methodSet) -> {
if (controllerAdviceBean.isApplicableToBeanType(handlerType)) {
Object bean = controllerAdviceBean.resolveBean();
for (Method method : methodSet) {
attrMethods.add(createModelAttributeMethod(binderFactory, bean, method));
}
}
});
// Then the handler's own @ModelAttribute methods.
for (Method method : methods) {
Object bean = handlerMethod.getBean();
attrMethods.add(createModelAttributeMethod(binderFactory, bean, method));
}
return new ModelFactory(attrMethods, binderFactory, sessionAttrHandler);
}
/**
 * Wraps a {@code @ModelAttribute} method as an {@link InvocableHandlerMethod},
 * wiring in the configured argument resolvers, parameter name discovery and
 * the given data binder factory.
 */
private InvocableHandlerMethod createModelAttributeMethod(WebDataBinderFactory factory, Object bean, Method method) {
    InvocableHandlerMethod invocable = new InvocableHandlerMethod(bean, method);
    if (this.argumentResolvers != null) {
        invocable.setHandlerMethodArgumentResolvers(this.argumentResolvers);
    }
    invocable.setDataBinderFactory(factory);
    invocable.setParameterNameDiscoverer(this.parameterNameDiscoverer);
    return invocable;
}
/**
 * Create the {@link WebDataBinderFactory} for the given handler: collects
 * applicable {@code @InitBinder} methods from {@code @ControllerAdvice} beans
 * first, then from the handler's own class, caching the per-type method scan.
 * @throws Exception in case of invalid state or arguments
 */
private WebDataBinderFactory getDataBinderFactory(HandlerMethod handlerMethod) throws Exception {
Class<?> handlerType = handlerMethod.getBeanType();
// Check-then-put race is acceptable: the method scan is idempotent
// (assumes the cache field, declared outside this view, is concurrent — TODO confirm).
Set<Method> methods = this.initBinderCache.get(handlerType);
if (methods == null) {
methods = MethodIntrospector.selectMethods(handlerType, INIT_BINDER_METHODS);
this.initBinderCache.put(handlerType, methods);
}
List<InvocableHandlerMethod> initBinderMethods = new ArrayList<>();
// Global methods first
this.initBinderAdviceCache.forEach((controllerAdviceBean, methodSet) -> {
if (controllerAdviceBean.isApplicableToBeanType(handlerType)) {
Object bean = controllerAdviceBean.resolveBean();
for (Method method : methodSet) {
initBinderMethods.add(createInitBinderMethod(bean, method));
}
}
});
// Then the handler's own @InitBinder methods.
for (Method method : methods) {
Object bean = handlerMethod.getBean();
initBinderMethods.add(createInitBinderMethod(bean, method));
}
DefaultDataBinderFactory factory = createDataBinderFactory(initBinderMethods);
// Method validation applies only when a validator is configured and the handler declares constraints.
factory.setMethodValidationApplicable(this.methodValidator != null && handlerMethod.shouldValidateArguments());
return factory;
}
/**
 * Wraps an {@code @InitBinder} method as an {@link InvocableHandlerMethod},
 * giving it its own plain {@link DefaultDataBinderFactory} (so init-binder
 * invocation does not recurse into init-binder methods).
 */
private InvocableHandlerMethod createInitBinderMethod(Object bean, Method method) {
    InvocableHandlerMethod invocable = new InvocableHandlerMethod(bean, method);
    if (this.initBinderArgumentResolvers != null) {
        invocable.setHandlerMethodArgumentResolvers(this.initBinderArgumentResolvers);
    }
    invocable.setParameterNameDiscoverer(this.parameterNameDiscoverer);
    invocable.setDataBinderFactory(new DefaultDataBinderFactory(this.webBindingInitializer));
    return invocable;
}
/**
 * Template method to create a new InitBinderDataBinderFactory instance.
 * <p>The default implementation creates a ServletRequestDataBinderFactory.
 * This can be overridden for custom ServletRequestDataBinder subclasses.
 * @param binderMethods {@code @InitBinder} methods
 * @return the InitBinderDataBinderFactory instance to use
 * @throws Exception in case of invalid state or arguments
 */
protected InitBinderDataBinderFactory createDataBinderFactory(List<InvocableHandlerMethod> binderMethods)
throws Exception {
// Uses the globally configured WebBindingInitializer as the base for each binder.
return new ServletRequestDataBinderFactory(binderMethods, getWebBindingInitializer());
}
/**
 * Turn the {@link ModelAndViewContainer} state into a {@link ModelAndView},
 * after letting the model factory update the model (e.g. session attributes).
 * @return the ModelAndView, or {@code null} if the request was fully handled
 */
private @Nullable ModelAndView getModelAndView(ModelAndViewContainer mavContainer,
ModelFactory modelFactory, NativeWebRequest webRequest) throws Exception {
// Must run before the request-handled check so model post-processing always happens.
modelFactory.updateModel(webRequest, mavContainer);
if (mavContainer.isRequestHandled()) {
return null;
}
ModelMap model = mavContainer.getModel();
ModelAndView mav = new ModelAndView(mavContainer.getViewName(), model, mavContainer.getStatus());
if (!mavContainer.isViewReference()) {
// The container holds an actual View instance rather than a view name.
mav.setView((View) mavContainer.getView());
}
// For redirects, copy flash attributes into the output FlashMap of the request.
if (model instanceof RedirectAttributes redirectAttributes) {
Map<String, ?> flashAttributes = redirectAttributes.getFlashAttributes();
HttpServletRequest request = webRequest.getNativeRequest(HttpServletRequest.class);
if (request != null) {
RequestContextUtils.getOutputFlashMap(request).putAll(flashAttributes);
}
}
return mav;
}
/**
* A default Spring MVC AsyncTaskExecutor that warns if used.
*/
@SuppressWarnings("serial")
private | RequestMappingHandlerAdapter |
java | processing__processing4 | java/src/processing/mode/java/ASTViewer.java | {
"start": 683,
"end": 4115
} | class ____ {
final JDialog window;
final JTree tree;
final Consumer<PreprocSketch> updateListener;
ASTViewer(JavaEditor editor, PreprocService pps) {
updateListener = this::buildAndUpdateTree;
window = new JDialog(editor);
tree = new JTree() {
@Override
public String convertValueToText(Object value, boolean selected,
boolean expanded, boolean leaf,
int row, boolean hasFocus) {
if (value instanceof DefaultMutableTreeNode) {
DefaultMutableTreeNode treeNode = (DefaultMutableTreeNode) value;
Object o = treeNode.getUserObject();
if (o instanceof ASTNode) {
ASTNode node = (ASTNode) o;
return CompletionGenerator.getNodeAsString(node);
}
}
return super.convertValueToText(value, selected, expanded, leaf, row, hasFocus);
}
};
tree.setCellRenderer(new ZoomTreeCellRenderer());
window.addComponentListener(new ComponentAdapter() {
@Override
public void componentHidden(ComponentEvent e) {
pps.unregisterListener(updateListener);
tree.setModel(null);
}
});
window.setDefaultCloseOperation(WindowConstants.HIDE_ON_CLOSE);
window.setBounds(new Rectangle(680, 100, 460, 620));
window.setTitle("AST View - " + editor.getSketch().getName());
JScrollPane sp = new JScrollPane();
sp.setViewportView(tree);
window.add(sp);
pps.whenDone(updateListener);
pps.registerListener(updateListener);
tree.addTreeSelectionListener(e -> {
if (tree.getLastSelectedPathComponent() != null) {
DefaultMutableTreeNode treeNode =
(DefaultMutableTreeNode) tree.getLastSelectedPathComponent();
if (treeNode.getUserObject() instanceof ASTNode) {
ASTNode node = (ASTNode) treeNode.getUserObject();
pps.whenDone(ps -> {
SketchInterval si = ps.mapJavaToSketch(node);
if (!ps.inRange(si)) return;
EventQueue.invokeLater(() -> editor.highlight(si.tabIndex, si.startTabOffset, si.stopTabOffset));
});
}
}
});
}
void dispose() {
if (window != null) {
window.dispose();
}
}
// Thread: worker
void buildAndUpdateTree(PreprocSketch ps) {
CompilationUnit cu = ps.compilationUnit;
if (cu.types().isEmpty()){
Messages.err("No Type found in CU");
return;
}
Deque<DefaultMutableTreeNode> treeNodeStack = new ArrayDeque<>();
ASTNode type0 = (ASTNode) cu.types().get(0);
type0.accept(new ASTVisitor() {
@Override
public boolean preVisit2(ASTNode node) {
treeNodeStack.push(new DefaultMutableTreeNode(node));
return super.preVisit2(node);
}
@Override
public void postVisit(ASTNode node) {
if (treeNodeStack.size() > 1) {
DefaultMutableTreeNode treeNode = treeNodeStack.pop();
treeNodeStack.peek().add(treeNode);
}
}
});
DefaultMutableTreeNode codeTree = treeNodeStack.pop();
EventQueue.invokeLater(() -> {
if (tree.hasFocus() || window.hasFocus()) {
return;
}
tree.setModel(new DefaultTreeModel(codeTree));
((DefaultTreeModel) tree.getModel()).reload();
tree.validate();
if (!window.isVisible()) {
window.setVisible(true);
}
});
}
} | ASTViewer |
java | google__dagger | javatests/dagger/internal/codegen/BindsMissingDelegateValidationTest.java | {
"start": 9070,
"end": 9349
} | interface ____");
} catch (Error e) {
errors.add(e);
}
try {
subject.hasErrorContaining("same map key is bound more than once")
.onSource(component)
.onLineContaining(" | C |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/UnusedNestedClassTest.java | {
"start": 5853,
"end": 6133
} | class ____ {}
}
""")
.doTest();
}
@Test
public void usedReflectively_suppressed() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import com.google.errorprone.annotations.Keep;
| B |
java | google__guice | extensions/dagger-adapter/test/com/google/inject/daggeradapter/DaggerAdapterTest.java | {
"start": 1736,
"end": 1805
} | class ____ extends TestCase {
@dagger.Module
static | DaggerAdapterTest |
java | bumptech__glide | benchmark/src/androidTest/java/com/bumptech/glide/benchmark/data/DataOpener.java | {
"start": 4145,
"end": 4629
} | class ____ implements DataOpener<ByteBuffer> {
private final FileOpener fileOpener = new FileOpener();
@Nullable private File file;
@Override
public ByteBuffer acquire(@RawRes int resourceId) throws IOException {
file = fileOpener.acquire(resourceId);
return ByteBufferUtil.fromFile(file);
}
@Override
public void close(ByteBuffer data) {
if (file != null) {
fileOpener.close(file);
}
}
}
}
| MemoryMappedByteBufferOpener |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/injection/constructornoinject/SingleNonNoArgConstructorInjectionTest.java | {
"start": 406,
"end": 742
} | class ____ {
@RegisterExtension
public ArcTestContainer container = new ArcTestContainer(Head.class, CombineHarvester.class);
@Test
public void testInjection() {
assertNotNull(Arc.container().instance(CombineHarvester.class).get().getHead());
}
@Dependent
static | SingleNonNoArgConstructorInjectionTest |
java | spring-projects__spring-boot | module/spring-boot-jdbc/src/main/java/org/springframework/boot/jdbc/docker/compose/MariaDbEnvironment.java | {
"start": 982,
"end": 2932
} | class ____ {
private final String username;
private final String password;
private final String database;
MariaDbEnvironment(Map<String, @Nullable String> env) {
this.username = extractUsername(env);
this.password = extractPassword(env);
this.database = extractDatabase(env);
}
private String extractUsername(Map<String, @Nullable String> env) {
String user = env.get("MARIADB_USER");
if (user == null) {
user = env.get("MYSQL_USER");
}
return (user != null) ? user : "root";
}
private String extractPassword(Map<String, @Nullable String> env) {
Assert.state(!env.containsKey("MARIADB_RANDOM_ROOT_PASSWORD"), "MARIADB_RANDOM_ROOT_PASSWORD is not supported");
Assert.state(!env.containsKey("MYSQL_RANDOM_ROOT_PASSWORD"), "MYSQL_RANDOM_ROOT_PASSWORD is not supported");
Assert.state(!env.containsKey("MARIADB_ROOT_PASSWORD_HASH"), "MARIADB_ROOT_PASSWORD_HASH is not supported");
boolean allowEmpty = env.containsKey("MARIADB_ALLOW_EMPTY_PASSWORD")
|| env.containsKey("MYSQL_ALLOW_EMPTY_PASSWORD") || env.containsKey("ALLOW_EMPTY_PASSWORD");
String password = env.get("MARIADB_PASSWORD");
password = (password != null) ? password : env.get("MYSQL_PASSWORD");
password = (password != null) ? password : env.get("MARIADB_ROOT_PASSWORD");
password = (password != null) ? password : env.get("MYSQL_ROOT_PASSWORD");
Assert.state(StringUtils.hasLength(password) || allowEmpty, "No MariaDB password found");
return (password != null) ? password : "";
}
private String extractDatabase(Map<String, @Nullable String> env) {
String database = env.get("MARIADB_DATABASE");
database = (database != null) ? database : env.get("MYSQL_DATABASE");
Assert.state(database != null, "No MARIADB_DATABASE defined");
return database;
}
String getUsername() {
return this.username;
}
String getPassword() {
return this.password;
}
String getDatabase() {
return this.database;
}
}
| MariaDbEnvironment |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/common/util/iterable/Iterables.java | {
"start": 1211,
"end": 3547
} | class ____<T> implements Iterable<T> {
private final Iterable<? extends Iterable<T>> inputs;
FlattenedIterables(Iterable<? extends Iterable<T>> inputs) {
List<Iterable<T>> list = new ArrayList<>();
for (Iterable<T> iterable : inputs) {
list.add(iterable);
}
this.inputs = list;
}
@Override
public Iterator<T> iterator() {
return StreamSupport.stream(inputs.spliterator(), false).flatMap(s -> StreamSupport.stream(s.spliterator(), false)).iterator();
}
}
public static <T> T get(Iterable<T> iterable, int position) {
Objects.requireNonNull(iterable);
if (position < 0) {
throw new IllegalArgumentException("position >= 0");
}
if (iterable instanceof List<T> list) {
if (position >= list.size()) {
throw new IndexOutOfBoundsException(Integer.toString(position));
}
return list.get(position);
} else {
Iterator<T> it = iterable.iterator();
for (int index = 0; index < position; index++) {
if (it.hasNext() == false) {
throw new IndexOutOfBoundsException(Integer.toString(position));
}
it.next();
}
if (it.hasNext() == false) {
throw new IndexOutOfBoundsException(Integer.toString(position));
}
return it.next();
}
}
public static <T> int indexOf(Iterable<T> iterable, Predicate<T> predicate) {
int i = 0;
for (T element : iterable) {
if (predicate.test(element)) {
return i;
}
i++;
}
return -1;
}
public static long size(Iterable<?> iterable) {
return StreamSupport.stream(iterable.spliterator(), false).count();
}
/**
* Adds a wrapper around {@code iterable} which asserts that {@link Iterator#remove()} is not called on the iterator it returns.
*/
public static <T> Iterable<T> assertReadOnly(Iterable<T> iterable) {
if (Assertions.ENABLED) {
return () -> Iterators.assertReadOnly(iterable.iterator());
} else {
return iterable;
}
}
}
| FlattenedIterables |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/guide/FakeUtils2.java | {
"start": 817,
"end": 954
} | class ____ {
public static final Function<? super Flux<String>, Flux<Tuple2<Long, String>>> enrichUser =
f -> f.elapsed();
}
| FakeUtils2 |
java | elastic__elasticsearch | libs/ssl-config/src/main/java/org/elasticsearch/common/ssl/SslDiagnostics.java | {
"start": 1076,
"end": 2466
} | class ____ {
public static final SslDiagnostics INSTANCE = new SslDiagnostics(Clock.systemUTC());
public SslDiagnostics(Clock clock) {
this.clock = clock;
}
private final Clock clock;
public static List<String> describeValidHostnames(X509Certificate certificate) {
try {
final Collection<List<?>> names = certificate.getSubjectAlternativeNames();
if (names == null || names.isEmpty()) {
return Collections.emptyList();
}
final List<String> description = new ArrayList<>(names.size());
for (List<?> pair : names) {
if (pair == null || pair.size() != 2) {
continue;
}
if ((pair.get(0) instanceof Integer) == false || (pair.get(1) instanceof String) == false) {
continue;
}
final int type = ((Integer) pair.get(0)).intValue();
final String name = (String) pair.get(1);
if (type == 2) {
description.add("DNS:" + name);
} else if (type == 7) {
description.add("IP:" + name);
}
}
return description;
} catch (CertificateParsingException e) {
return Collections.emptyList();
}
}
public | SslDiagnostics |
java | google__error-prone | core/src/test/java/com/google/errorprone/refaster/testdata/output/IsInstanceTemplateExample.java | {
"start": 762,
"end": 873
} | class ____ {
public void foo() {
System.out.println("foo" instanceof String);
}
}
| IsInstanceTemplateExample |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/EmptyObjectTest.java | {
"start": 246,
"end": 1017
} | class ____ extends TestCase {
public void test_codec_null() throws Exception {
V0 v = new V0();
SerializeConfig mapping = new SerializeConfig();
mapping.setAsmEnable(false);
String text = JSON.toJSONString(v, mapping, SerializerFeature.WriteMapNullValue);
Assert.assertEquals("{}", text);
JSON.parseObject(text, V0.class);
}
public void test_codec_null_1() throws Exception {
V0 v = new V0();
SerializeConfig mapping = new SerializeConfig();
mapping.setAsmEnable(false);
String text = JSON.toJSONString(v, mapping, SerializerFeature.WriteMapNullValue, SerializerFeature.WriteNullNumberAsZero);
Assert.assertEquals("{}", text);
}
public static | EmptyObjectTest |
java | quarkusio__quarkus | extensions/mongodb-client/deployment/src/test/java/io/quarkus/mongodb/MongoTracingNotEnabledTest.java | {
"start": 435,
"end": 1221
} | class ____ extends MongoTestBase {
@Inject
MongoClient client;
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.setArchiveProducer(
() -> ShrinkWrap.create(JavaArchive.class).addClasses(MongoTestBase.class,
MockReactiveContextProvider.class))
.withConfigurationResource("default-mongoclient.properties");
@AfterEach
void cleanup() {
if (client != null) {
client.close();
}
}
@Test
void contextProviderMustNotBeCalledIfNoOpenTelemetryIsAvailable() {
assertThat(client.listDatabaseNames().first()).isNotEmpty();
assertThat(MockReactiveContextProvider.EVENTS).isEmpty();
}
}
| MongoTracingNotEnabledTest |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/options/KeysOptions.java | {
"start": 744,
"end": 976
} | interface ____ extends InvocationOptions<KeysOptions> {
/**
* Creates the default options
*
* @return options instance
*/
static KeysOptions defaults() {
return new KeysParams();
}
}
| KeysOptions |
java | elastic__elasticsearch | modules/apm/src/test/java/org/elasticsearch/telemetry/apm/RecordingOtelMeter.java | {
"start": 10714,
"end": 11868
} | class ____ extends AbstractBuilder implements DoubleUpDownCounterBuilder {
RecordingDoubleUpDownBuilder(AbstractBuilder other) {
super(other);
}
@Override
public DoubleUpDownCounterBuilder setDescription(String description) {
innerSetDescription(description);
return this;
}
@Override
public DoubleUpDownCounterBuilder setUnit(String unit) {
innerSetUnit(unit);
return this;
}
@Override
public DoubleUpDownCounter build() {
DoubleUpDownRecorder counter = new DoubleUpDownRecorder(name);
recorder.register(counter, counter.getInstrument(), name, description, unit);
return counter;
}
@Override
public ObservableDoubleUpDownCounter buildWithCallback(Consumer<ObservableDoubleMeasurement> callback) {
unimplemented();
return null;
}
@Override
public ObservableDoubleMeasurement buildObserver() {
unimplemented();
return null;
}
}
private | RecordingDoubleUpDownBuilder |
java | jhy__jsoup | src/main/java/org/jsoup/nodes/Printer.java | {
"start": 1781,
"end": 7574
} | class ____ extends Printer {
boolean preserveWhitespace = false;
Pretty(Node root, QuietAppendable accum, OutputSettings settings) {
super(root, accum, settings);
// check if there is a pre on stack
for (Node node = root; node != null; node = node.parentNode()) {
if (tagIs(Tag.PreserveWhitespace, node)) {
preserveWhitespace = true;
break;
}
}
}
@Override
void addHead(Element el, int depth) {
if (shouldIndent(el))
indent(depth);
super.addHead(el, depth);
if (tagIs(Tag.PreserveWhitespace, el)) preserveWhitespace = true;
}
@Override
void addTail(Element el, int depth) {
if (shouldIndent(nextNonBlank(el.firstChild()))) {
indent(depth);
}
super.addTail(el, depth);
// clear the preserveWhitespace if this element is not, and there are none on the stack above
if (preserveWhitespace && el.tag.is(Tag.PreserveWhitespace)) {
for (Element parent = el.parent(); parent != null; parent = parent.parent()) {
if (parent.tag().preserveWhitespace()) return; // keep
}
preserveWhitespace = false;
}
}
@Override
void addNode(LeafNode node, int depth) {
if (shouldIndent(node))
indent(depth);
super.addNode(node, depth);
}
@Override
void addText(TextNode node, int textOptions, int depth) {
if (!preserveWhitespace) {
textOptions |= Entities.Normalise;
textOptions = textTrim(node, textOptions);
if (!node.isBlank() && isBlockEl(node.parentNode) && shouldIndent(node))
indent(depth);
}
super.addText(node, textOptions, depth);
}
int textTrim(TextNode node, int options) {
if (!isBlockEl(node.parentNode)) return options; // don't trim inline, whitespace significant
Node prev = node.previousSibling();
Node next = node.nextSibling();
// if previous is not an inline element
if (!(prev instanceof Element && !isBlockEl(prev))) {
// if there is no previous sib; or not a text node and should be indented
if (prev == null || !(prev instanceof TextNode) && shouldIndent(prev))
options |= Entities.TrimLeading;
}
if (next == null || !(next instanceof TextNode) && shouldIndent(next)) {
options |= Entities.TrimTrailing;
} else { // trim trailing whitespace if the next non-empty TextNode has leading whitespace
next = nextNonBlank(next);
if (next instanceof TextNode && StringUtil.isWhitespace(next.nodeValue().codePointAt(0)))
options |= Entities.TrimTrailing;
}
return options;
}
boolean shouldIndent(@Nullable Node node) {
if (node == null || node == root || preserveWhitespace || isBlankText(node))
return false;
if (isBlockEl(node))
return true;
Node prevSib = previousNonblank(node);
if (isBlockEl(prevSib)) return true;
Element parent = node.parentNode;
if (!isBlockEl(parent) || parent.tag().is(Tag.InlineContainer) || !hasNonTextNodes(parent))
return false;
return prevSib == null ||
(!(prevSib instanceof TextNode) &&
(isBlockEl(prevSib) || !(prevSib instanceof Element)));
}
boolean isBlockEl(@Nullable Node node) {
if (node == null) return false;
if (node instanceof Element) {
Element el = (Element) node;
return el.isBlock() ||
(!el.tag.isKnownTag() && (el.parentNode instanceof Document || hasChildBlocks(el)));
}
return false;
}
/**
Returns true if any of the Element's child nodes should indent. Checks the last 5 nodes only (to minimize
scans).
*/
static boolean hasChildBlocks(Element el) {
Element child = el.firstElementChild();
for (int i = 0; i < maxScan && child != null; i++) {
if (child.isBlock() || !child.tag.isKnownTag()) return true;
child = child.nextElementSibling();
}
return false;
}
static private final int maxScan = 5;
static boolean hasNonTextNodes(Element el) {
Node child = el.firstChild();
for (int i = 0; i < maxScan && child != null; i++) {
if (!(child instanceof TextNode)) return true;
child = child.nextSibling();
}
return false;
}
static @Nullable Node previousNonblank(Node node) {
Node prev = node.previousSibling();
while (isBlankText(prev)) prev = prev.previousSibling();
return prev;
}
static @Nullable Node nextNonBlank(@Nullable Node node) {
while (isBlankText(node)) node = node.nextSibling();
return node;
}
static boolean isBlankText(@Nullable Node node) {
return node instanceof TextNode && ((TextNode) node).isBlank();
}
static boolean tagIs(int option, @Nullable Node node) {
return node instanceof Element && ((Element) node).tag.is(option);
}
}
/** Outline Printer */
static | Pretty |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/ClassInitializationDeadlock.java | {
"start": 3171,
"end": 6009
} | interface ____ default methods.
return NO_MATCH;
}
new SuppressibleTreePathScanner<Void, Void>(state) {
@Override
public Void visitClass(ClassTree node, Void unused) {
for (Tree member : node.getMembers()) {
if (member instanceof ClassTree) {
continue;
}
scan(member, null);
}
return null;
}
@Override
public Void visitBlock(BlockTree tree, Void unused) {
if (tree.isStatic()) {
scanForSubtypes(getCurrentPath(), classSymbol, state);
}
return null;
}
@Override
public Void visitVariable(VariableTree tree, Void unused) {
ExpressionTree initializer = tree.getInitializer();
if (getSymbol(tree).isStatic() && initializer != null) {
scanForSubtypes(new TreePath(getCurrentPath(), initializer), classSymbol, state);
}
return null;
}
}.scan(state.getPath(), null);
return NO_MATCH;
}
private void scanForSubtypes(TreePath path, ClassSymbol classSymbol, VisitorState state) {
new TreePathScanner<Void, Void>() {
@Override
public Void visitClass(ClassTree node, Void unused) {
return null;
}
@Override
public Void visitMethod(MethodTree node, Void unused) {
return null;
}
@Override
public Void visitMemberSelect(MemberSelectTree tree, Void unused) {
if (ASTHelpers.constValue(tree) != null) {
return null;
}
if (tree.getIdentifier().contentEquals("class")) {
return null;
}
handle(tree);
return super.visitMemberSelect(tree, null);
}
@Override
public Void visitIdentifier(IdentifierTree tree, Void unused) {
if (ASTHelpers.constValue(tree) != null) {
return null;
}
handle(tree);
return null;
}
private void handle(ExpressionTree tree) {
if (!(getSymbol(tree) instanceof ClassSymbol use)) {
return;
}
if (use.equals(classSymbol)) {
return;
}
if (!use.isSubClass(classSymbol, state.getTypes())) {
return;
}
if (use.isEnclosedBy(classSymbol) && !isStatic(use)) {
// Nested inner classes implicitly take the enclosing instance as a constructor parameter,
// and can't be initialized without first initializing their containing class.
return;
}
ImmutableSet<ClassSymbol> nonPrivateInstantiators =
nonPrivateInstantiators(use, classSymbol, state.getTypes());
if (nonPrivateInstantiators.isEmpty()) {
return;
}
StringBuilder message = new StringBuilder();
message.append(
String.format(
"Possible | with |
java | spring-projects__spring-security | oauth2/oauth2-authorization-server/src/test/java/org/springframework/security/oauth2/server/authorization/web/OAuth2TokenRevocationEndpointFilterTests.java | {
"start": 3570,
"end": 17109
} | class ____ {
private static final String DEFAULT_TOKEN_REVOCATION_ENDPOINT_URI = "/oauth2/revoke";
private AuthenticationManager authenticationManager;
private OAuth2TokenRevocationEndpointFilter filter;
private final HttpMessageConverter<OAuth2Error> errorHttpResponseConverter = new OAuth2ErrorHttpMessageConverter();
@BeforeEach
public void setUp() {
this.authenticationManager = mock(AuthenticationManager.class);
this.filter = new OAuth2TokenRevocationEndpointFilter(this.authenticationManager);
}
@AfterEach
public void cleanup() {
SecurityContextHolder.clearContext();
}
@Test
public void constructorWhenAuthenticationManagerNullThenThrowIllegalArgumentException() {
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> new OAuth2TokenRevocationEndpointFilter(null))
.withMessage("authenticationManager cannot be null");
}
@Test
public void constructorWhenTokenRevocationEndpointUriNullThenThrowIllegalArgumentException() {
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> new OAuth2TokenRevocationEndpointFilter(this.authenticationManager, null))
.withMessage("tokenRevocationEndpointUri cannot be empty");
}
@Test
public void setAuthenticationDetailsSourceWhenNullThenThrowIllegalArgumentException() {
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> this.filter.setAuthenticationDetailsSource(null))
.withMessage("authenticationDetailsSource cannot be null");
}
@Test
public void setAuthenticationConverterWhenNullThenThrowIllegalArgumentException() {
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> this.filter.setAuthenticationConverter(null))
.withMessage("authenticationConverter cannot be null");
}
@Test
public void setAuthenticationSuccessHandlerWhenNullThenThrowIllegalArgumentException() {
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> this.filter.setAuthenticationSuccessHandler(null))
.withMessage("authenticationSuccessHandler cannot be null");
}
@Test
public void setAuthenticationFailureHandlerWhenNullThenThrowIllegalArgumentException() {
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> this.filter.setAuthenticationFailureHandler(null))
.withMessage("authenticationFailureHandler cannot be null");
}
@Test
public void doFilterWhenNotTokenRevocationRequestThenNotProcessed() throws Exception {
String requestUri = "/path";
MockHttpServletRequest request = new MockHttpServletRequest("POST", requestUri);
request.setServletPath(requestUri);
MockHttpServletResponse response = new MockHttpServletResponse();
FilterChain filterChain = mock(FilterChain.class);
this.filter.doFilter(request, response, filterChain);
verify(filterChain).doFilter(any(HttpServletRequest.class), any(HttpServletResponse.class));
}
@Test
public void doFilterWhenTokenRevocationRequestGetThenNotProcessed() throws Exception {
String requestUri = DEFAULT_TOKEN_REVOCATION_ENDPOINT_URI;
MockHttpServletRequest request = new MockHttpServletRequest("GET", requestUri);
request.setServletPath(requestUri);
MockHttpServletResponse response = new MockHttpServletResponse();
FilterChain filterChain = mock(FilterChain.class);
this.filter.doFilter(request, response, filterChain);
verify(filterChain).doFilter(any(HttpServletRequest.class), any(HttpServletResponse.class));
}
@Test
public void doFilterWhenTokenRevocationRequestMissingTokenThenInvalidRequestError() throws Exception {
doFilterWhenTokenRevocationRequestInvalidParameterThenError(OAuth2ParameterNames.TOKEN,
OAuth2ErrorCodes.INVALID_REQUEST, (request) -> request.removeParameter(OAuth2ParameterNames.TOKEN));
}
@Test
public void doFilterWhenTokenRevocationRequestMultipleTokenThenInvalidRequestError() throws Exception {
doFilterWhenTokenRevocationRequestInvalidParameterThenError(OAuth2ParameterNames.TOKEN,
OAuth2ErrorCodes.INVALID_REQUEST,
(request) -> request.addParameter(OAuth2ParameterNames.TOKEN, "token-2"));
}
@Test
public void doFilterWhenTokenRevocationRequestMultipleTokenTypeHintThenInvalidRequestError() throws Exception {
doFilterWhenTokenRevocationRequestInvalidParameterThenError(OAuth2ParameterNames.TOKEN_TYPE_HINT,
OAuth2ErrorCodes.INVALID_REQUEST, (request) -> request
.addParameter(OAuth2ParameterNames.TOKEN_TYPE_HINT, OAuth2TokenType.ACCESS_TOKEN.getValue()));
}
@Test
public void doFilterWhenTokenRevocationRequestValidThenSuccessResponse() throws Exception {
RegisteredClient registeredClient = TestRegisteredClients.registeredClient().build();
Authentication clientPrincipal = new OAuth2ClientAuthenticationToken(registeredClient,
ClientAuthenticationMethod.CLIENT_SECRET_BASIC, registeredClient.getClientSecret());
OAuth2AccessToken accessToken = new OAuth2AccessToken(OAuth2AccessToken.TokenType.BEARER, "token",
Instant.now(), Instant.now().plus(Duration.ofHours(1)),
new HashSet<>(Arrays.asList("scope1", "scope2")));
OAuth2TokenRevocationAuthenticationToken tokenRevocationAuthentication = new OAuth2TokenRevocationAuthenticationToken(
accessToken, clientPrincipal);
given(this.authenticationManager.authenticate(any())).willReturn(tokenRevocationAuthentication);
SecurityContext securityContext = SecurityContextHolder.createEmptyContext();
securityContext.setAuthentication(clientPrincipal);
SecurityContextHolder.setContext(securityContext);
MockHttpServletRequest request = createTokenRevocationRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
FilterChain filterChain = mock(FilterChain.class);
this.filter.doFilter(request, response, filterChain);
verifyNoInteractions(filterChain);
verify(this.authenticationManager).authenticate(any());
assertThat(response.getStatus()).isEqualTo(HttpStatus.OK.value());
}
@Test
public void doFilterWhenCustomAuthenticationDetailsSourceThenUsed() throws Exception {
RegisteredClient registeredClient = TestRegisteredClients.registeredClient().build();
Authentication clientPrincipal = new OAuth2ClientAuthenticationToken(registeredClient,
ClientAuthenticationMethod.CLIENT_SECRET_BASIC, registeredClient.getClientSecret());
MockHttpServletRequest request = createTokenRevocationRequest();
AuthenticationDetailsSource<HttpServletRequest, WebAuthenticationDetails> authenticationDetailsSource = mock(
AuthenticationDetailsSource.class);
WebAuthenticationDetails webAuthenticationDetails = new WebAuthenticationDetails(request);
given(authenticationDetailsSource.buildDetails(any())).willReturn(webAuthenticationDetails);
this.filter.setAuthenticationDetailsSource(authenticationDetailsSource);
OAuth2AccessToken accessToken = new OAuth2AccessToken(OAuth2AccessToken.TokenType.BEARER, "token",
Instant.now(), Instant.now().plus(Duration.ofHours(1)),
new HashSet<>(Arrays.asList("scope1", "scope2")));
OAuth2TokenRevocationAuthenticationToken tokenRevocationAuthentication = new OAuth2TokenRevocationAuthenticationToken(
accessToken, clientPrincipal);
given(this.authenticationManager.authenticate(any())).willReturn(tokenRevocationAuthentication);
SecurityContext securityContext = SecurityContextHolder.createEmptyContext();
securityContext.setAuthentication(clientPrincipal);
SecurityContextHolder.setContext(securityContext);
MockHttpServletResponse response = new MockHttpServletResponse();
FilterChain filterChain = mock(FilterChain.class);
this.filter.doFilter(request, response, filterChain);
verify(authenticationDetailsSource).buildDetails(any());
}
@Test
public void doFilterWhenCustomAuthenticationConverterThenUsed() throws Exception {
RegisteredClient registeredClient = TestRegisteredClients.registeredClient().build();
Authentication clientPrincipal = new OAuth2ClientAuthenticationToken(registeredClient,
ClientAuthenticationMethod.CLIENT_SECRET_BASIC, registeredClient.getClientSecret());
OAuth2AccessToken accessToken = new OAuth2AccessToken(OAuth2AccessToken.TokenType.BEARER, "token",
Instant.now(), Instant.now().plus(Duration.ofHours(1)),
new HashSet<>(Arrays.asList("scope1", "scope2")));
OAuth2TokenRevocationAuthenticationToken tokenRevocationAuthentication = new OAuth2TokenRevocationAuthenticationToken(
accessToken, clientPrincipal);
AuthenticationConverter authenticationConverter = mock(AuthenticationConverter.class);
given(authenticationConverter.convert(any())).willReturn(tokenRevocationAuthentication);
this.filter.setAuthenticationConverter(authenticationConverter);
given(this.authenticationManager.authenticate(any())).willReturn(tokenRevocationAuthentication);
SecurityContext securityContext = SecurityContextHolder.createEmptyContext();
securityContext.setAuthentication(clientPrincipal);
SecurityContextHolder.setContext(securityContext);
MockHttpServletRequest request = createTokenRevocationRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
FilterChain filterChain = mock(FilterChain.class);
this.filter.doFilter(request, response, filterChain);
verify(authenticationConverter).convert(any());
}
@Test
public void doFilterWhenCustomAuthenticationSuccessHandlerThenUsed() throws Exception {
RegisteredClient registeredClient = TestRegisteredClients.registeredClient().build();
Authentication clientPrincipal = new OAuth2ClientAuthenticationToken(registeredClient,
ClientAuthenticationMethod.CLIENT_SECRET_BASIC, registeredClient.getClientSecret());
OAuth2AccessToken accessToken = new OAuth2AccessToken(OAuth2AccessToken.TokenType.BEARER, "token",
Instant.now(), Instant.now().plus(Duration.ofHours(1)),
new HashSet<>(Arrays.asList("scope1", "scope2")));
OAuth2TokenRevocationAuthenticationToken tokenRevocationAuthentication = new OAuth2TokenRevocationAuthenticationToken(
accessToken, clientPrincipal);
AuthenticationSuccessHandler authenticationSuccessHandler = mock(AuthenticationSuccessHandler.class);
this.filter.setAuthenticationSuccessHandler(authenticationSuccessHandler);
given(this.authenticationManager.authenticate(any())).willReturn(tokenRevocationAuthentication);
SecurityContext securityContext = SecurityContextHolder.createEmptyContext();
securityContext.setAuthentication(clientPrincipal);
SecurityContextHolder.setContext(securityContext);
MockHttpServletRequest request = createTokenRevocationRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
FilterChain filterChain = mock(FilterChain.class);
this.filter.doFilter(request, response, filterChain);
verify(authenticationSuccessHandler).onAuthenticationSuccess(any(), any(), any());
}
@Test
public void doFilterWhenCustomAuthenticationFailureHandlerThenUsed() throws Exception {
RegisteredClient registeredClient = TestRegisteredClients.registeredClient().build();
Authentication clientPrincipal = new OAuth2ClientAuthenticationToken(registeredClient,
ClientAuthenticationMethod.CLIENT_SECRET_BASIC, registeredClient.getClientSecret());
AuthenticationFailureHandler authenticationFailureHandler = mock(AuthenticationFailureHandler.class);
this.filter.setAuthenticationFailureHandler(authenticationFailureHandler);
given(this.authenticationManager.authenticate(any())).willThrow(OAuth2AuthenticationException.class);
SecurityContext securityContext = SecurityContextHolder.createEmptyContext();
securityContext.setAuthentication(clientPrincipal);
SecurityContextHolder.setContext(securityContext);
MockHttpServletRequest request = createTokenRevocationRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
FilterChain filterChain = mock(FilterChain.class);
this.filter.doFilter(request, response, filterChain);
verify(authenticationFailureHandler).onAuthenticationFailure(any(), any(), any());
}
private void doFilterWhenTokenRevocationRequestInvalidParameterThenError(String parameterName, String errorCode,
Consumer<MockHttpServletRequest> requestConsumer) throws Exception {
MockHttpServletRequest request = createTokenRevocationRequest();
requestConsumer.accept(request);
MockHttpServletResponse response = new MockHttpServletResponse();
FilterChain filterChain = mock(FilterChain.class);
this.filter.doFilter(request, response, filterChain);
verifyNoInteractions(filterChain);
assertThat(response.getStatus()).isEqualTo(HttpStatus.BAD_REQUEST.value());
OAuth2Error error = readError(response);
assertThat(error.getErrorCode()).isEqualTo(errorCode);
assertThat(error.getDescription()).isEqualTo("OAuth 2.0 Token Revocation Parameter: " + parameterName);
}
private OAuth2Error readError(MockHttpServletResponse response) throws Exception {
MockClientHttpResponse httpResponse = new MockClientHttpResponse(response.getContentAsByteArray(),
HttpStatus.valueOf(response.getStatus()));
return this.errorHttpResponseConverter.read(OAuth2Error.class, httpResponse);
}
private static MockHttpServletRequest createTokenRevocationRequest() {
String requestUri = DEFAULT_TOKEN_REVOCATION_ENDPOINT_URI;
MockHttpServletRequest request = new MockHttpServletRequest("POST", requestUri);
request.setServletPath(requestUri);
request.addParameter(OAuth2ParameterNames.TOKEN, "token");
request.addParameter(OAuth2ParameterNames.TOKEN_TYPE_HINT, OAuth2TokenType.ACCESS_TOKEN.getValue());
return request;
}
}
| OAuth2TokenRevocationEndpointFilterTests |
java | apache__camel | components/camel-ai/camel-langchain4j-agent-api/src/main/java/org/apache/camel/component/langchain4j/agent/api/AgentConfiguration.java | {
"start": 5647,
"end": 6068
} | class ____
* @return this configuration instance for method chaining
* @see #parseGuardrailClasses(String)
*/
public AgentConfiguration withInputGuardrailClassesList(String inputGuardrailClasses) {
return withInputGuardrailClasses(parseGuardrailClasses(inputGuardrailClasses));
}
/**
* Sets input guardrail classes from an array of | names |
java | quarkusio__quarkus | integration-tests/rest-client-reactive/src/main/java/io/quarkus/it/rest/client/main/DefaultCtorTestFilter.java | {
"start": 148,
"end": 328
} | class ____ implements ClientRequestFilter {
@Override
public void filter(ClientRequestContext requestContext) {
// Do nothing on purpose.
}
}
| DefaultCtorTestFilter |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/EntityTableMappingsTests.java | {
"start": 4304,
"end": 4768
} | class ____ {
@Id
private Integer id;
@Basic
private String name;
private UnionRoot() {
// for use by Hibernate
}
public UnionRoot(Integer id, String name) {
this.id = id;
this.name = name;
}
public Integer getId() {
return id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
@Entity( name = "UnionSub1" )
@Table( name = "unions_subs1" )
public static | UnionRoot |
java | google__guice | core/test/com/google/inject/BindingTest.java | {
"start": 8264,
"end": 9644
} | class ____ uses raw types.
final Constructor<C> constructor = C.class.getConstructor(Stage.class, Object.class);
final Key<Object> s = new Key<Object>(named("s")) {};
final Key<Object> i = new Key<Object>(named("i")) {};
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(s).toConstructor(constructor, new TypeLiteral<C<Stage>>() {});
bind(i).toConstructor(constructor, new TypeLiteral<C<Injector>>() {});
}
});
// Safe because the correct generic type was used when the constructor was bound
@SuppressWarnings("unchecked")
C<Stage> one = (C<Stage>) injector.getInstance(s);
assertEquals(Stage.DEVELOPMENT, one.stage);
assertEquals(Stage.DEVELOPMENT, one.t);
assertEquals(Stage.DEVELOPMENT, one.anotherT);
// Safe because the correct generic type was used when the constructor was bound
@SuppressWarnings("unchecked")
C<Injector> two = (C<Injector>) injector.getInstance(i);
assertEquals(Stage.DEVELOPMENT, two.stage);
assertEquals(injector, two.t);
assertEquals(injector, two.anotherT);
}
@Test
public void testToConstructorBindingsFailsOnRawTypes() throws NoSuchMethodException {
@SuppressWarnings("rawtypes") // Unavoidable because | literal |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/observable/ObservableSerializeTest.java | {
"start": 9172,
"end": 10820
} | class ____ implements ObservableSource<String> {
final String[] values;
private Thread t;
TestSingleThreadedObservable(final String... values) {
this.values = values;
}
@Override
public void subscribe(final Observer<? super String> observer) {
observer.onSubscribe(Disposable.empty());
System.out.println("TestSingleThreadedObservable subscribed to ...");
t = new Thread(new Runnable() {
@Override
public void run() {
try {
System.out.println("running TestSingleThreadedObservable thread");
for (String s : values) {
System.out.println("TestSingleThreadedObservable onNext: " + s);
observer.onNext(s);
}
observer.onComplete();
} catch (Throwable e) {
throw new RuntimeException(e);
}
}
});
System.out.println("starting TestSingleThreadedObservable thread");
t.start();
System.out.println("done starting TestSingleThreadedObservable thread");
}
public void waitToFinish() {
try {
t.join();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
}
/**
* This spawns a thread for the subscription, then a separate thread for each onNext call.
*/
private static | TestSingleThreadedObservable |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/utils/CommonPythonUtil.java | {
"start": 4944,
"end": 27106
} | class ____ {
private static Method pickleValue = null;
private static final String PYTHON_CONFIG_UTILS_CLASS =
"org.apache.flink.python.util.PythonConfigUtil";
private static final String PYTHON_OPTIONS_CLASS = "org.apache.flink.python.PythonOptions";
private CommonPythonUtil() {}
public static Class<?> loadClass(String className, ClassLoader classLoader) {
try {
return Class.forName(className, false, classLoader);
} catch (ClassNotFoundException e) {
throw new TableException(
"The dependency of 'flink-python' is not present on the classpath.", e);
}
}
public static Configuration extractPythonConfiguration(
ReadableConfig tableConfig, ClassLoader classLoader) {
Class<?> clazz = loadClass(PYTHON_CONFIG_UTILS_CLASS, classLoader);
try {
Method method =
clazz.getDeclaredMethod("extractPythonConfiguration", ReadableConfig.class);
return (Configuration) method.invoke(null, tableConfig);
} catch (IllegalAccessException | NoSuchMethodException | InvocationTargetException e) {
throw new TableException("Method extractPythonConfiguration accessed failed.", e);
}
}
public static PythonFunctionInfo createPythonFunctionInfo(
RexCall pythonRexCall, Map<RexNode, Integer> inputNodes, ClassLoader classLoader) {
SqlOperator operator = pythonRexCall.getOperator();
try {
if (operator instanceof ScalarSqlFunction) {
return createPythonFunctionInfo(
pythonRexCall,
inputNodes,
((ScalarSqlFunction) operator).scalarFunction(),
classLoader);
} else if (operator instanceof TableSqlFunction) {
return createPythonFunctionInfo(
pythonRexCall,
inputNodes,
((TableSqlFunction) operator).udtf(),
classLoader);
} else if (operator instanceof BridgingSqlFunction) {
return createPythonFunctionInfo(
pythonRexCall,
inputNodes,
((BridgingSqlFunction) operator).getDefinition(),
classLoader);
}
} catch (InvocationTargetException | IllegalAccessException e) {
throw new TableException("Method pickleValue accessed failed. ", e);
}
throw new TableException(String.format("Unsupported Python SqlFunction %s.", operator));
}
@SuppressWarnings("unchecked")
public static boolean isPythonWorkerUsingManagedMemory(
Configuration config, ClassLoader classLoader) {
Class<?> clazz = loadClass(PYTHON_OPTIONS_CLASS, classLoader);
try {
return config.get(
(ConfigOption<Boolean>) (clazz.getField("USE_MANAGED_MEMORY").get(null)));
} catch (IllegalAccessException | NoSuchFieldException e) {
throw new TableException("Field USE_MANAGED_MEMORY accessed failed.", e);
}
}
@SuppressWarnings("unchecked")
public static boolean isPythonWorkerInProcessMode(
Configuration config, ClassLoader classLoader) {
Class<?> clazz = loadClass(PYTHON_OPTIONS_CLASS, classLoader);
try {
return config.get(
(ConfigOption<String>)
(clazz.getField("PYTHON_EXECUTION_MODE").get(null)))
.equalsIgnoreCase("process");
} catch (IllegalAccessException | NoSuchFieldException e) {
throw new TableException("Field PYTHON_EXECUTION_MODE accessed failed.", e);
}
}
public static Tuple2<PythonAggregateFunctionInfo[], DataViewSpec[][]>
extractPythonAggregateFunctionInfos(
AggregateInfoList pythonAggregateInfoList, AggregateCall[] aggCalls) {
List<PythonAggregateFunctionInfo> pythonAggregateFunctionInfoList = new ArrayList<>();
List<DataViewSpec[]> dataViewSpecList = new ArrayList<>();
AggregateInfo[] aggInfos = pythonAggregateInfoList.aggInfos();
for (int i = 0; i < aggInfos.length; i++) {
AggregateInfo aggInfo = aggInfos[i];
UserDefinedFunction function = aggInfo.function();
if (function instanceof PythonFunction) {
pythonAggregateFunctionInfoList.add(
new PythonAggregateFunctionInfo(
(PythonFunction) function,
Arrays.stream(aggInfo.argIndexes()).boxed().toArray(),
aggCalls[i].filterArg,
aggCalls[i].isDistinct()));
TypeInference typeInference = function.getTypeInference(null);
dataViewSpecList.add(
extractDataViewSpecs(
i,
typeInference
.getAccumulatorTypeStrategy()
.get()
.inferType(null)
.get()));
} else {
int filterArg = -1;
boolean distinct = false;
if (i < aggCalls.length) {
filterArg = aggCalls[i].filterArg;
distinct = aggCalls[i].isDistinct();
}
pythonAggregateFunctionInfoList.add(
new PythonAggregateFunctionInfo(
getBuiltInPythonAggregateFunction(function),
Arrays.stream(aggInfo.argIndexes()).boxed().toArray(),
filterArg,
distinct));
// The data views of the built in Python Aggregate Function are different from Java
// side, we will create the spec at Python side.
dataViewSpecList.add(new DataViewSpec[0]);
}
}
return Tuple2.of(
pythonAggregateFunctionInfoList.toArray(new PythonAggregateFunctionInfo[0]),
dataViewSpecList.toArray(new DataViewSpec[0][0]));
}
public static Tuple2<int[], PythonFunctionInfo[]>
extractPythonAggregateFunctionInfosFromAggregateCall(AggregateCall[] aggCalls) {
Map<Integer, Integer> inputNodes = new LinkedHashMap<>();
List<PythonFunctionInfo> pythonFunctionInfos = new ArrayList<>();
for (AggregateCall aggregateCall : aggCalls) {
List<Integer> inputs = new ArrayList<>();
List<Integer> argList = aggregateCall.getArgList();
for (Integer arg : argList) {
if (inputNodes.containsKey(arg)) {
inputs.add(inputNodes.get(arg));
} else {
Integer inputOffset = inputNodes.size();
inputs.add(inputOffset);
inputNodes.put(arg, inputOffset);
}
}
PythonFunction pythonFunction = null;
SqlAggFunction aggregateFunction = aggregateCall.getAggregation();
if (aggregateFunction instanceof AggSqlFunction) {
pythonFunction =
(PythonFunction) ((AggSqlFunction) aggregateFunction).aggregateFunction();
} else if (aggregateFunction instanceof BridgingSqlAggFunction) {
pythonFunction =
(PythonFunction)
((BridgingSqlAggFunction) aggregateFunction).getDefinition();
}
PythonFunctionInfo pythonFunctionInfo =
new PythonAggregateFunctionInfo(
pythonFunction,
inputs.toArray(),
aggregateCall.filterArg,
aggregateCall.isDistinct());
pythonFunctionInfos.add(pythonFunctionInfo);
}
int[] udafInputOffsets = inputNodes.keySet().stream().mapToInt(i -> i).toArray();
return Tuple2.of(udafInputOffsets, pythonFunctionInfos.toArray(new PythonFunctionInfo[0]));
}
public static DataViewSpec[] extractDataViewSpecs(int index, DataType accType) {
if (!(accType instanceof FieldsDataType)) {
return new DataViewSpec[0];
}
FieldsDataType compositeAccType = (FieldsDataType) accType;
if (includesDataView(compositeAccType)) {
LogicalType logicalType = compositeAccType.getLogicalType();
if (logicalType instanceof RowType) {
List<DataType> childrenDataTypes = compositeAccType.getChildren();
return IntStream.range(0, childrenDataTypes.size())
.mapToObj(
i -> {
DataType childDataType = childrenDataTypes.get(i);
LogicalType childLogicalType = childDataType.getLogicalType();
if ((childLogicalType instanceof RowType)
&& includesDataView((FieldsDataType) childDataType)) {
throw new TableException(
"For Python AggregateFunction, DataView cannot be used in the"
+ " nested columns of the accumulator. ");
} else if ((childLogicalType instanceof StructuredType)
&& ListView.class.isAssignableFrom(
((StructuredType) childLogicalType)
.getImplementationClass()
.get())) {
return new ListViewSpec(
"agg"
+ index
+ "$"
+ ((RowType) logicalType)
.getFieldNames()
.get(i),
i,
childDataType.getChildren().get(0));
} else if ((childLogicalType instanceof StructuredType)
&& MapView.class.isAssignableFrom(
((StructuredType) childLogicalType)
.getImplementationClass()
.get())) {
return new MapViewSpec(
"agg"
+ index
+ "$"
+ ((RowType) logicalType)
.getFieldNames()
.get(i),
i,
childDataType.getChildren().get(0),
false);
}
return null;
})
.filter(Objects::nonNull)
.toArray(DataViewSpec[]::new);
} else {
throw new TableException(
"For Python AggregateFunction you can only use DataView in " + "Row type.");
}
} else {
return new DataViewSpec[0];
}
}
private static boolean includesDataView(FieldsDataType fdt) {
return fdt.getChildren().stream()
.anyMatch(
childrenFieldsDataType -> {
LogicalType logicalType = childrenFieldsDataType.getLogicalType();
if (logicalType instanceof RowType) {
return includesDataView((FieldsDataType) childrenFieldsDataType);
} else if (logicalType instanceof StructuredType) {
return DataView.class.isAssignableFrom(
((StructuredType) logicalType)
.getImplementationClass()
.get());
} else {
return false;
}
});
}
private static byte[] convertLiteralToPython(
RexLiteral o, SqlTypeName typeName, ClassLoader classLoader)
throws InvocationTargetException, IllegalAccessException {
byte type;
Object value;
if (o.getValue3() == null) {
type = 0;
value = null;
} else {
switch (typeName) {
case TINYINT:
type = 0;
value = ((BigDecimal) o.getValue3()).byteValueExact();
break;
case SMALLINT:
type = 0;
value = ((BigDecimal) o.getValue3()).shortValueExact();
break;
case INTEGER:
type = 0;
value = ((BigDecimal) o.getValue3()).intValueExact();
break;
case BIGINT:
type = 0;
value = ((BigDecimal) o.getValue3()).longValueExact();
break;
case FLOAT:
type = 0;
value = ((BigDecimal) o.getValue3()).floatValue();
break;
case DOUBLE:
type = 0;
value = ((BigDecimal) o.getValue3()).doubleValue();
break;
case DECIMAL:
case BOOLEAN:
type = 0;
value = o.getValue3();
break;
case CHAR:
case VARCHAR:
type = 0;
value = o.getValue3().toString();
break;
case DATE:
type = 1;
value = o.getValue3();
break;
case TIME:
type = 2;
value = o.getValue3();
break;
case TIMESTAMP:
type = 3;
value = o.getValue3();
break;
default:
throw new RuntimeException("Unsupported type " + typeName);
}
}
loadPickleValue(classLoader);
return (byte[]) pickleValue.invoke(null, value, type);
}
private static void loadPickleValue(ClassLoader classLoader) {
if (pickleValue == null) {
synchronized (CommonPythonUtil.class) {
if (pickleValue == null) {
Class<?> clazz =
loadClass(
"org.apache.flink.api.common.python.PythonBridgeUtils",
classLoader);
try {
pickleValue = clazz.getMethod("pickleValue", Object.class, byte.class);
} catch (NoSuchMethodException e) {
throw new TableException("Method pickleValue loaded failed.", e);
}
}
}
}
}
private static PythonFunctionInfo createPythonFunctionInfo(
RexCall pythonRexCall,
Map<RexNode, Integer> inputNodes,
FunctionDefinition functionDefinition,
ClassLoader classLoader)
throws InvocationTargetException, IllegalAccessException {
ArrayList<Object> inputs = new ArrayList<>();
for (RexNode operand : pythonRexCall.getOperands()) {
if (operand instanceof RexCall) {
RexCall childPythonRexCall = (RexCall) operand;
if (childPythonRexCall.getOperator() instanceof SqlCastFunction
&& childPythonRexCall.getOperands().get(0) instanceof RexInputRef
&& childPythonRexCall.getOperands().get(0).getType()
instanceof TimeIndicatorRelDataType) {
operand = childPythonRexCall.getOperands().get(0);
} else {
PythonFunctionInfo argPythonInfo =
createPythonFunctionInfo(childPythonRexCall, inputNodes, classLoader);
inputs.add(argPythonInfo);
continue;
}
} else if (operand instanceof RexLiteral) {
RexLiteral literal = (RexLiteral) operand;
inputs.add(
convertLiteralToPython(
literal, literal.getType().getSqlTypeName(), classLoader));
continue;
}
assert operand instanceof RexInputRef;
if (inputNodes.containsKey(operand)) {
inputs.add(inputNodes.get(operand));
} else {
Integer inputOffset = inputNodes.size();
inputs.add(inputOffset);
inputNodes.put(operand, inputOffset);
}
}
return new PythonFunctionInfo((PythonFunction) functionDefinition, inputs.toArray());
}
private static BuiltInPythonAggregateFunction getBuiltInPythonAggregateFunction(
UserDefinedFunction javaBuiltInAggregateFunction) {
if (javaBuiltInAggregateFunction instanceof AvgAggFunction) {
return BuiltInPythonAggregateFunction.AVG;
}
if (javaBuiltInAggregateFunction instanceof Count1AggFunction) {
return BuiltInPythonAggregateFunction.COUNT1;
}
if (javaBuiltInAggregateFunction instanceof CountAggFunction) {
return BuiltInPythonAggregateFunction.COUNT;
}
if (javaBuiltInAggregateFunction instanceof FirstValueAggFunction) {
return BuiltInPythonAggregateFunction.FIRST_VALUE;
}
if (javaBuiltInAggregateFunction instanceof FirstValueWithRetractAggFunction) {
return BuiltInPythonAggregateFunction.FIRST_VALUE_RETRACT;
}
if (javaBuiltInAggregateFunction instanceof LastValueAggFunction) {
return BuiltInPythonAggregateFunction.LAST_VALUE;
}
if (javaBuiltInAggregateFunction instanceof LastValueWithRetractAggFunction) {
return BuiltInPythonAggregateFunction.LAST_VALUE_RETRACT;
}
if (javaBuiltInAggregateFunction instanceof ListAggFunction) {
return BuiltInPythonAggregateFunction.LIST_AGG;
}
if (javaBuiltInAggregateFunction instanceof ListAggWithRetractAggFunction) {
return BuiltInPythonAggregateFunction.LIST_AGG_RETRACT;
}
if (javaBuiltInAggregateFunction instanceof ListAggWsWithRetractAggFunction) {
return BuiltInPythonAggregateFunction.LIST_AGG_WS_RETRACT;
}
if (javaBuiltInAggregateFunction instanceof MaxAggFunction) {
return BuiltInPythonAggregateFunction.MAX;
}
if (javaBuiltInAggregateFunction instanceof MaxWithRetractAggFunction) {
return BuiltInPythonAggregateFunction.MAX_RETRACT;
}
if (javaBuiltInAggregateFunction instanceof MinAggFunction) {
return BuiltInPythonAggregateFunction.MIN;
}
if (javaBuiltInAggregateFunction instanceof MinWithRetractAggFunction) {
return BuiltInPythonAggregateFunction.MIN_RETRACT;
}
if (javaBuiltInAggregateFunction instanceof SumAggFunction) {
return BuiltInPythonAggregateFunction.SUM;
}
if (javaBuiltInAggregateFunction instanceof Sum0AggFunction.IntSum0AggFunction) {
return BuiltInPythonAggregateFunction.INT_SUM0;
}
if (javaBuiltInAggregateFunction instanceof Sum0AggFunction.ByteSum0AggFunction) {
return BuiltInPythonAggregateFunction.INT_SUM0;
}
if (javaBuiltInAggregateFunction instanceof Sum0AggFunction.ShortSum0AggFunction) {
return BuiltInPythonAggregateFunction.INT_SUM0;
}
if (javaBuiltInAggregateFunction instanceof Sum0AggFunction.LongSum0AggFunction) {
return BuiltInPythonAggregateFunction.INT_SUM0;
}
if (javaBuiltInAggregateFunction instanceof Sum0AggFunction.FloatSum0AggFunction) {
return BuiltInPythonAggregateFunction.FLOAT_SUM0;
}
if (javaBuiltInAggregateFunction instanceof Sum0AggFunction.DoubleSum0AggFunction) {
return BuiltInPythonAggregateFunction.FLOAT_SUM0;
}
if (javaBuiltInAggregateFunction instanceof Sum0AggFunction.DecimalSum0AggFunction) {
return BuiltInPythonAggregateFunction.DECIMAL_SUM0;
}
if (javaBuiltInAggregateFunction instanceof SumWithRetractAggFunction) {
return BuiltInPythonAggregateFunction.SUM_RETRACT;
}
throw new TableException(
"Aggregate function "
+ javaBuiltInAggregateFunction
+ " is still not supported to be mixed with Python UDAF.");
}
}
| CommonPythonUtil |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/bean/override/mockito/MockitoSpyBeanConfigurationErrorTests.java | {
"start": 5330,
"end": 5539
} | class ____ {
@MockitoSpyBean
MySelfInjectionScopedProxy mySelfInjectionScopedProxy;
}
@Component("myScopedProxy")
@Scope(proxyMode = ScopedProxyMode.TARGET_CLASS)
static | SelfInjectionScopedProxyTestCase |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/web/servlet/client/samples/SoftAssertionTests.java | {
"start": 1069,
"end": 2077
} | class ____ {
private final RestTestClient restTestClient = RestTestClient.bindToController(new TestController()).build();
@Test
void expectAll() {
this.restTestClient.get().uri("/test").exchange()
.expectAll(
responseSpec -> responseSpec.expectStatus().isOk(),
responseSpec -> responseSpec.expectBody(String.class).isEqualTo("hello")
);
}
@Test
void expectAllWithMultipleFailures() {
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() ->
this.restTestClient.get().uri("/test").exchange()
.expectAll(
responseSpec -> responseSpec.expectStatus().isBadRequest(),
responseSpec -> responseSpec.expectStatus().isOk(),
responseSpec -> responseSpec.expectBody(String.class).isEqualTo("bogus")
)
)
.withMessage("""
Multiple Exceptions (2):
Status expected:<400 BAD_REQUEST> but was:<200 OK>
Response body expected:<bogus> but was:<hello>""");
}
@RestController
private static | SoftAssertionTests |
java | apache__maven | compat/maven-compat/src/main/java/org/apache/maven/profiles/activation/ProfileActivator.java | {
"start": 942,
"end": 1154
} | interface ____ {
String ROLE = ProfileActivator.class.getName();
boolean canDetermineActivation(Profile profile);
boolean isActive(Profile profile) throws ProfileActivationException;
}
| ProfileActivator |
java | apache__camel | components/camel-aws/camel-aws2-s3/src/test/java/org/apache/camel/component/aws2/s3/integration/S3StreamUploadMultipartAsyncIT.java | {
"start": 1645,
"end": 3896
} | class ____ extends Aws2S3Base {
@EndpointInject
private ProducerTemplate template;
@EndpointInject("mock:result")
private MockEndpoint result;
@Test
public void sendIn() throws Exception {
result.expectedMessageCount(10);
for (int i = 0; i < 10; i++) {
final CompletableFuture<Exchange> future = template.asyncSend("direct:stream1", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setHeader(AWS2S3Constants.KEY, "empty.bin");
exchange.getIn().setBody(new File("src/test/resources/empty.bin"));
}
});
assertDoesNotThrow(() -> future.get(5, TimeUnit.SECONDS));
}
MockEndpoint.assertIsSatisfied(context, 10, TimeUnit.SECONDS);
Exchange ex = template.request("direct:listObjects", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setHeader(AWS2S3Constants.S3_OPERATION, AWS2S3Operations.listObjects);
}
});
// file size: 5,242,880 bytes, 10 * (5 chunks of 1,000,000 + remainder of 242,880)
List<S3Object> resp = ex.getMessage().getBody(List.class);
assertEquals(60, resp.size());
assertEquals(10 * Files.size(Paths.get("src/test/resources/empty.bin")),
resp.stream().mapToLong(S3Object::size).sum());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
String awsEndpoint1
= String.format(
"aws2-s3://%s?autoCreateBucket=true&streamingUploadMode=true&keyName=fileTest.txt&batchMessageNumber=25&namingStrategy=random",
name.get());
from("direct:stream1").to(awsEndpoint1).to("mock:result");
String awsEndpoint = String.format("aws2-s3://%s?autoCreateBucket=true",
name.get());
from("direct:listObjects").to(awsEndpoint);
}
};
}
}
| S3StreamUploadMultipartAsyncIT |
java | spring-projects__spring-boot | module/spring-boot-micrometer-tracing-opentelemetry/src/test/java/org/springframework/boot/micrometer/tracing/opentelemetry/autoconfigure/OpenTelemetryTracingAutoConfigurationTests.java | {
"start": 4024,
"end": 18286
} | class ____ {
private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(
org.springframework.boot.opentelemetry.autoconfigure.OpenTelemetrySdkAutoConfiguration.class,
OpenTelemetryTracingAutoConfiguration.class));
@Test
void shouldSupplyBeans() {
this.contextRunner.run((context) -> {
assertThat(context).hasSingleBean(OtelTracer.class);
assertThat(context).hasSingleBean(EventPublisher.class);
assertThat(context).hasSingleBean(OtelCurrentTraceContext.class);
assertThat(context).hasSingleBean(SdkTracerProvider.class);
assertThat(context).hasSingleBean(ContextPropagators.class);
assertThat(context).hasSingleBean(Sampler.class);
assertThat(context).hasSingleBean(Tracer.class);
assertThat(context).hasSingleBean(Slf4JEventListener.class);
assertThat(context).hasSingleBean(Slf4JBaggageEventListener.class);
assertThat(context).hasSingleBean(SpanProcessor.class);
assertThat(context).hasSingleBean(OtelPropagator.class);
assertThat(context).hasSingleBean(TextMapPropagator.class);
assertThat(context).hasSingleBean(OtelSpanCustomizer.class);
assertThat(context).hasSingleBean(SpanProcessors.class);
assertThat(context).hasSingleBean(SpanExporters.class);
});
}
@Test
void samplerIsParentBased() {
this.contextRunner.run((context) -> {
Sampler sampler = context.getBean(Sampler.class);
assertThat(sampler).isNotNull();
assertThat(sampler.getDescription()).startsWith("ParentBased{");
});
}
@ParameterizedTest
@ValueSource(strings = { "io.micrometer.tracing.otel", "io.opentelemetry.sdk", "io.opentelemetry.api" })
void shouldNotSupplyBeansIfDependencyIsMissing(String packageName) {
this.contextRunner.withClassLoader(new FilteredClassLoader(packageName)).run((context) -> {
assertThat(context).doesNotHaveBean(OtelTracer.class);
assertThat(context).doesNotHaveBean(EventPublisher.class);
assertThat(context).doesNotHaveBean(OtelCurrentTraceContext.class);
assertThat(context).doesNotHaveBean(SdkTracerProvider.class);
assertThat(context).doesNotHaveBean(ContextPropagators.class);
assertThat(context).doesNotHaveBean(Sampler.class);
assertThat(context).doesNotHaveBean(Tracer.class);
assertThat(context).doesNotHaveBean(Slf4JEventListener.class);
assertThat(context).doesNotHaveBean(Slf4JBaggageEventListener.class);
assertThat(context).doesNotHaveBean(SpanProcessor.class);
assertThat(context).doesNotHaveBean(OtelPropagator.class);
assertThat(context).doesNotHaveBean(TextMapPropagator.class);
assertThat(context).doesNotHaveBean(OtelSpanCustomizer.class);
assertThat(context).doesNotHaveBean(SpanProcessors.class);
assertThat(context).doesNotHaveBean(SpanExporters.class);
});
}
@Test
void shouldBackOffOnCustomBeans() {
this.contextRunner.withUserConfiguration(CustomConfiguration.class).run((context) -> {
assertThat(context).hasBean("customMicrometerTracer");
assertThat(context).hasSingleBean(io.micrometer.tracing.Tracer.class);
assertThat(context).hasBean("customEventPublisher");
assertThat(context).hasSingleBean(EventPublisher.class);
assertThat(context).hasBean("customOtelCurrentTraceContext");
assertThat(context).hasSingleBean(OtelCurrentTraceContext.class);
assertThat(context).hasBean("customSdkTracerProvider");
assertThat(context).hasSingleBean(SdkTracerProvider.class);
assertThat(context).hasBean("customContextPropagators");
assertThat(context).hasSingleBean(ContextPropagators.class);
assertThat(context).hasBean("customSampler");
assertThat(context).hasSingleBean(Sampler.class);
assertThat(context).hasBean("customTracer");
assertThat(context).hasSingleBean(Tracer.class);
assertThat(context).hasBean("customSlf4jEventListener");
assertThat(context).hasSingleBean(Slf4JEventListener.class);
assertThat(context).hasBean("customSlf4jBaggageEventListener");
assertThat(context).hasSingleBean(Slf4JBaggageEventListener.class);
assertThat(context).hasBean("customOtelPropagator");
assertThat(context).hasSingleBean(OtelPropagator.class);
assertThat(context).hasBean("customSpanCustomizer");
assertThat(context).hasSingleBean(SpanCustomizer.class);
assertThat(context).hasBean("customSpanProcessors");
assertThat(context).hasSingleBean(SpanProcessors.class);
assertThat(context).hasBean("customSpanExporters");
assertThat(context).hasSingleBean(SpanExporters.class);
assertThat(context).hasBean("customBatchSpanProcessor");
assertThat(context).hasSingleBean(BatchSpanProcessor.class);
});
}
@Test
void shouldSetupDefaultResourceAttributes() {
this.contextRunner
.withConfiguration(
AutoConfigurations.of(ObservationAutoConfiguration.class, MicrometerTracingAutoConfiguration.class))
.withUserConfiguration(InMemoryRecordingSpanExporterConfiguration.class)
.withPropertyValues("management.tracing.sampling.probability=1.0")
.run((context) -> {
context.getBean(io.micrometer.tracing.Tracer.class).nextSpan().name("test").end();
InMemoryRecordingSpanExporter exporter = context.getBean(InMemoryRecordingSpanExporter.class);
exporter.await(Duration.ofSeconds(10));
SpanData spanData = exporter.getExportedSpans().get(0);
Map<AttributeKey<?>, Object> expectedAttributes = Resource.getDefault()
.merge(Resource.create(Attributes.of(AttributeKey.stringKey("service.name"), "unknown_service")))
.getAttributes()
.asMap();
assertThat(spanData.getResource().getAttributes().asMap()).isEqualTo(expectedAttributes);
});
}
@Test
void shouldAllowMultipleSpanProcessors() {
this.contextRunner.withUserConfiguration(AdditionalSpanProcessorConfiguration.class).run((context) -> {
assertThat(context.getBeansOfType(SpanProcessor.class)).hasSize(2);
assertThat(context).hasBean("customSpanProcessor");
SpanProcessors spanProcessors = context.getBean(SpanProcessors.class);
assertThat(spanProcessors).hasSize(2);
});
}
@Test
void shouldAllowMultipleSpanExporters() {
this.contextRunner.withUserConfiguration(MultipleSpanExporterConfiguration.class).run((context) -> {
assertThat(context.getBeansOfType(SpanExporter.class)).hasSize(2);
assertThat(context).hasBean("spanExporter1");
assertThat(context).hasBean("spanExporter2");
SpanExporters spanExporters = context.getBean(SpanExporters.class);
assertThat(spanExporters).hasSize(2);
});
}
@Test
void shouldAllowMultipleTextMapPropagators() {
this.contextRunner.withUserConfiguration(CustomConfiguration.class).run((context) -> {
assertThat(context.getBeansOfType(TextMapPropagator.class)).hasSize(2);
assertThat(context).hasBean("customTextMapPropagator");
});
}
@Test
void shouldNotSupplySlf4jBaggageEventListenerWhenBaggageCorrelationDisabled() {
this.contextRunner.withPropertyValues("management.tracing.baggage.correlation.enabled=false")
.run((context) -> assertThat(context).doesNotHaveBean(Slf4JBaggageEventListener.class));
}
@Test
void shouldNotSupplySlf4JBaggageEventListenerWhenBaggageDisabled() {
this.contextRunner.withPropertyValues("management.tracing.baggage.enabled=false")
.run((context) -> assertThat(context).doesNotHaveBean(Slf4JBaggageEventListener.class));
}
@Test
void shouldSupplyB3PropagationIfPropagationPropertySet() {
this.contextRunner.withPropertyValues("management.tracing.propagation.type=B3").run((context) -> {
TextMapPropagator propagator = context.getBean(TextMapPropagator.class);
List<TextMapPropagator> injectors = getInjectors(propagator);
assertThat(injectors).hasExactlyElementsOfTypes(B3Propagator.class, BaggageTextMapPropagator.class);
});
}
@Test
void shouldSupplyB3PropagationIfPropagationPropertySetAndBaggageDisabled() {
this.contextRunner
.withPropertyValues("management.tracing.propagation.type=B3", "management.tracing.baggage.enabled=false")
.run((context) -> {
TextMapPropagator propagator = context.getBean(TextMapPropagator.class);
List<TextMapPropagator> injectors = getInjectors(propagator);
assertThat(injectors).hasExactlyElementsOfTypes(B3Propagator.class);
});
}
@Test
void shouldSupplyW3CPropagationWithBaggageByDefault() {
this.contextRunner.withPropertyValues("management.tracing.baggage.remote-fields=foo").run((context) -> {
TextMapPropagator propagator = context.getBean(TextMapPropagator.class);
List<TextMapPropagator> injectors = getInjectors(propagator);
List<String> fields = new ArrayList<>();
for (TextMapPropagator injector : injectors) {
fields.addAll(injector.fields());
}
assertThat(fields).containsExactly("traceparent", "tracestate", "baggage", "foo");
});
}
@Test
void shouldSupplyW3CPropagationWithoutBaggageWhenDisabled() {
this.contextRunner.withPropertyValues("management.tracing.baggage.enabled=false").run((context) -> {
TextMapPropagator propagator = context.getBean(TextMapPropagator.class);
List<TextMapPropagator> injectors = getInjectors(propagator);
assertThat(injectors).hasExactlyElementsOfTypes(W3CTraceContextPropagator.class);
});
}
@Test
void shouldConfigureRemoteAndTaggedFields() {
this.contextRunner
.withPropertyValues("management.tracing.baggage.remote-fields=r1",
"management.tracing.baggage.tag-fields=t1")
.run((context) -> {
CompositeTextMapPropagator propagator = context.getBean(CompositeTextMapPropagator.class);
assertThat(propagator).extracting("baggagePropagator.baggageManager.remoteFields")
.asInstanceOf(InstanceOfAssertFactories.list(String.class))
.containsExactly("r1");
assertThat(propagator).extracting("baggagePropagator.baggageManager.tagFields")
.asInstanceOf(InstanceOfAssertFactories.list(String.class))
.containsExactly("t1");
});
}
@Test
void shouldCustomizeSdkTracerProvider() {
this.contextRunner.withUserConfiguration(SdkTracerProviderCustomizationConfiguration.class).run((context) -> {
SdkTracerProvider tracerProvider = context.getBean(SdkTracerProvider.class);
assertThat(tracerProvider.getSpanLimits().getMaxNumberOfEvents()).isEqualTo(42);
assertThat(tracerProvider.getSampler()).isEqualTo(Sampler.alwaysOn());
});
}
@Test
void defaultSpanProcessorShouldUseMeterProviderIfAvailable() {
this.contextRunner.withUserConfiguration(MeterProviderConfiguration.class).run((context) -> {
MeterProvider meterProvider = context.getBean(MeterProvider.class);
assertThat(Mockito.mockingDetails(meterProvider).isMock()).isTrue();
then(meterProvider).should().meterBuilder(anyString());
});
}
@Test
void shouldDisablePropagationIfTracingIsDisabled() {
this.contextRunner.withPropertyValues("management.tracing.export.enabled=false").run((context) -> {
assertThat(context).hasSingleBean(TextMapPropagator.class);
TextMapPropagator propagator = context.getBean(TextMapPropagator.class);
assertThat(propagator.fields()).isEmpty();
});
}
@Test
void batchSpanProcessorShouldBeConfiguredWithCustomProperties() {
this.contextRunner
.withPropertyValues("management.opentelemetry.tracing.export.timeout=45s",
"management.opentelemetry.tracing.export.include-unsampled=true",
"management.opentelemetry.tracing.export.max-batch-size=256",
"management.opentelemetry.tracing.export.max-queue-size=4096",
"management.opentelemetry.tracing.export.schedule-delay=15s")
.run((context) -> {
assertThat(context).hasSingleBean(BatchSpanProcessor.class);
BatchSpanProcessor batchSpanProcessor = context.getBean(BatchSpanProcessor.class);
assertThat(batchSpanProcessor).hasFieldOrPropertyWithValue("exportUnsampledSpans", true)
.extracting("worker")
.hasFieldOrPropertyWithValue("exporterTimeoutNanos", Duration.ofSeconds(45).toNanos())
.hasFieldOrPropertyWithValue("maxExportBatchSize", 256)
.hasFieldOrPropertyWithValue("scheduleDelayNanos", Duration.ofSeconds(15).toNanos())
.extracting("queue")
.satisfies((queue) -> assertThat(ReflectionTestUtils.<Integer>invokeMethod(queue, "capacity"))
.isEqualTo(4096));
});
}
@Test
void batchSpanProcessorShouldBeConfiguredWithDefaultProperties() {
this.contextRunner.run((context) -> {
assertThat(context).hasSingleBean(BatchSpanProcessor.class);
BatchSpanProcessor batchSpanProcessor = context.getBean(BatchSpanProcessor.class);
assertThat(batchSpanProcessor).hasFieldOrPropertyWithValue("exportUnsampledSpans", false)
.extracting("worker")
.hasFieldOrPropertyWithValue("exporterTimeoutNanos", Duration.ofSeconds(30).toNanos())
.hasFieldOrPropertyWithValue("maxExportBatchSize", 512)
.hasFieldOrPropertyWithValue("scheduleDelayNanos", Duration.ofSeconds(5).toNanos())
.extracting("queue")
.satisfies((queue) -> assertThat(ReflectionTestUtils.<Integer>invokeMethod(queue, "capacity"))
.isEqualTo(2048));
});
}
@Test // gh-41439
@ForkedClassPath
void shouldPublishEventsWhenContextStorageIsInitializedEarly() {
this.contextRunner.withInitializer(this::initializeOpenTelemetry)
.withUserConfiguration(OtelEventListener.class)
.run((context) -> {
OtelEventListener listener = context.getBean(OtelEventListener.class);
io.micrometer.tracing.Tracer micrometerTracer = context.getBean(io.micrometer.tracing.Tracer.class);
io.micrometer.tracing.Span span = micrometerTracer.nextSpan().name("test");
try (SpanInScope scoped = micrometerTracer.withSpan(span.start())) {
assertThat(listener.events).isNotEmpty();
}
finally {
span.end();
}
});
}
private void initializeOpenTelemetry(ConfigurableApplicationContext context) {
context.addApplicationListener(new OpenTelemetryEventPublisherBeansApplicationListener());
Span.current();
}
private List<TextMapPropagator> getInjectors(TextMapPropagator propagator) {
assertThat(propagator).as("propagator").isNotNull();
if (propagator instanceof CompositeTextMapPropagator compositePropagator) {
return compositePropagator.getInjectors().stream().toList();
}
fail("Expected CompositeTextMapPropagator, found %s".formatted(propagator.getClass()));
throw new AssertionError("Unreachable");
}
@Configuration(proxyBeanMethods = false)
private static final | OpenTelemetryTracingAutoConfigurationTests |
java | apache__flink | flink-table/flink-table-code-splitter/src/test/resources/block/expected/TestIfStatementRewrite3.java | {
"start": 7,
"end": 1037
} | class ____ {
public void myFun1(int[] a, int[] b) throws RuntimeException {
if (a[0] == 0) {
myFun1_0_0(a, b);
} else if (a[1] == 22) {
myFun1_0_1_2(a, b);
} else if (a[3] == 0) {
myFun1_0_1_3_4(a, b);
} else if (a[4] == 0) {
myFun1_0_1_3_5_6(a, b);
} else {
myFun1_0_1_3_5_7(a, b);
}
}
void myFun1_0_1_3_4(int[] a, int[] b) throws RuntimeException {
a[3] = b[3];
a[33] = b[33];
}
void myFun1_0_1_2(int[] a, int[] b) throws RuntimeException {
a[1] = b[12];
a[2] = b[22];
}
void myFun1_0_1_3_5_6(int[] a, int[] b) throws RuntimeException {
a[4] = b[4];
a[44] = b[44];
}
void myFun1_0_1_3_5_7(int[] a, int[] b) throws RuntimeException {
a[0] = b[0];
a[1] = b[1];
a[2] = b[2];
}
void myFun1_0_0(int[] a, int[] b) throws RuntimeException {
a[0] = 1;
a[1] = 1;
}
}
| TestIfStatementRewrite3 |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/security/oauthbearer/internals/secured/assertion/AssertionJwtTemplate.java | {
"start": 2150,
"end": 2813
} | interface ____ extends Closeable {
/**
* Returns a map containing zero or more header values.
*
* @return Values to include in the JWT header
*/
Map<String, Object> header();
/**
* Returns a map containing zero or more JWT payload claim values.
*
* @return Values to include in the JWT payload
*/
Map<String, Object> payload();
/**
* Closes any resources used by this implementation. The default implementation of
* this method is a no op, for convenience to implementors.
*/
@Override
default void close() throws IOException {
// Do nothing...
}
}
| AssertionJwtTemplate |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.