language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/batch/BatchAndBagCollectionTest.java | {
"start": 1317,
"end": 2618
} | class ____ {
@BeforeAll
public void setUp(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
EntityA entityA = new EntityA( 1 );
EntityA childA1 = new EntityA( 2 );
EntityA childA2 = new EntityA( 3 );
EntityB entityB1 = new EntityB();
EntityB entityB2 = new EntityB();
EntityB entityB3 = new EntityB();
entityA.addChild( childA1 );
entityA.addChild( childA2 );
childA1.setListOfEntitiesB( List.of( entityB1, entityB2, entityB3 ) );
session.persist( entityA );
session.persist( childA1 );
session.persist( childA2 );
session.persist( entityB1 );
session.persist( entityB2 );
session.persist( entityB3 );
}
);
}
@Test
public void testOneToManyHasCorrectSize(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
List<EntityA> entitiesA = session.createQuery(
"select a from EntityA a where a.parent is null",
EntityA.class
)
.getResultList();
assertThat( entitiesA ).hasSize( 1 );
EntityA entityA = entitiesA.get( 0 );
assertThat( entityA.getId() ).isEqualTo( 1 );
assertThat( entityA.getChildren() ).hasSize( 2 );
}
);
}
@Entity(name = "EntityA")
@Table(name = "ENTITY_A")
public static | BatchAndBagCollectionTest |
java | spring-projects__spring-framework | spring-aop/src/main/java/org/springframework/aop/support/annotation/AnnotationMatchingPointcut.java | {
"start": 3036,
"end": 5118
} | class ____
* (can be {@code null})
* @param methodAnnotationType the annotation type to look for at the method level
* (can be {@code null})
* @param checkInherited whether to also check the superclasses and interfaces
* as well as meta-annotations for the annotation type
* @since 5.0
* @see AnnotationClassFilter#AnnotationClassFilter(Class, boolean)
* @see AnnotationMethodMatcher#AnnotationMethodMatcher(Class, boolean)
*/
@SuppressWarnings("NullAway") // Dataflow analysis limitation
public AnnotationMatchingPointcut(@Nullable Class<? extends Annotation> classAnnotationType,
@Nullable Class<? extends Annotation> methodAnnotationType, boolean checkInherited) {
Assert.isTrue((classAnnotationType != null || methodAnnotationType != null),
"Either Class annotation type or Method annotation type needs to be specified (or both)");
if (classAnnotationType != null) {
this.classFilter = new AnnotationClassFilter(classAnnotationType, checkInherited);
}
else {
this.classFilter = new AnnotationCandidateClassFilter(methodAnnotationType);
}
if (methodAnnotationType != null) {
this.methodMatcher = new AnnotationMethodMatcher(methodAnnotationType, checkInherited);
}
else {
this.methodMatcher = MethodMatcher.TRUE;
}
}
@Override
public ClassFilter getClassFilter() {
return this.classFilter;
}
@Override
public MethodMatcher getMethodMatcher() {
return this.methodMatcher;
}
@Override
public boolean equals(@Nullable Object other) {
return (this == other || (other instanceof AnnotationMatchingPointcut otherPointcut &&
this.classFilter.equals(otherPointcut.classFilter) &&
this.methodMatcher.equals(otherPointcut.methodMatcher)));
}
@Override
public int hashCode() {
return this.classFilter.hashCode() * 37 + this.methodMatcher.hashCode();
}
@Override
public String toString() {
return "AnnotationMatchingPointcut: " + this.classFilter + ", " + this.methodMatcher;
}
/**
* Factory method for an AnnotationMatchingPointcut that matches
* for the specified annotation at the | level |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DataDrivenDBRecordReader.java | {
"start": 1389,
"end": 4529
} | class ____<T extends DBWritable> extends DBRecordReader<T> {
private static final Logger LOG =
LoggerFactory.getLogger(DataDrivenDBRecordReader.class);
private String dbProductName; // database manufacturer string.
/**
* @param split The InputSplit to read data for
* @throws SQLException
*/
public DataDrivenDBRecordReader(DBInputFormat.DBInputSplit split,
Class<T> inputClass, Configuration conf, Connection conn, DBConfiguration dbConfig,
String cond, String [] fields, String table, String dbProduct)
throws SQLException {
super(split, inputClass, conf, conn, dbConfig, cond, fields, table);
this.dbProductName = dbProduct;
}
/** Returns the query for selecting the records,
* subclasses can override this for custom behaviour.*/
@SuppressWarnings("unchecked")
protected String getSelectQuery() {
StringBuilder query = new StringBuilder();
DataDrivenDBInputFormat.DataDrivenDBInputSplit dataSplit =
(DataDrivenDBInputFormat.DataDrivenDBInputSplit) getSplit();
DBConfiguration dbConf = getDBConf();
String [] fieldNames = getFieldNames();
String tableName = getTableName();
String conditions = getConditions();
// Build the WHERE clauses associated with the data split first.
// We need them in both branches of this function.
StringBuilder conditionClauses = new StringBuilder();
conditionClauses.append("( ").append(dataSplit.getLowerClause());
conditionClauses.append(" ) AND ( ").append(dataSplit.getUpperClause());
conditionClauses.append(" )");
if(dbConf.getInputQuery() == null) {
// We need to generate the entire query.
query.append("SELECT ");
for (int i = 0; i < fieldNames.length; i++) {
query.append(fieldNames[i]);
if (i != fieldNames.length -1) {
query.append(", ");
}
}
query.append(" FROM ").append(tableName);
if (!dbProductName.startsWith("ORACLE")) {
// Seems to be necessary for hsqldb? Oracle explicitly does *not*
// use this clause.
query.append(" AS ").append(tableName);
}
query.append(" WHERE ");
if (conditions != null && conditions.length() > 0) {
// Put the user's conditions first.
query.append("( ").append(conditions).append(" ) AND ");
}
// Now append the conditions associated with our split.
query.append(conditionClauses.toString());
} else {
// User provided the query. We replace the special token with our WHERE clause.
String inputQuery = dbConf.getInputQuery();
if (inputQuery.indexOf(DataDrivenDBInputFormat.SUBSTITUTE_TOKEN) == -1) {
LOG.error("Could not find the clause substitution token "
+ DataDrivenDBInputFormat.SUBSTITUTE_TOKEN + " in the query: ["
+ inputQuery + "]. Parallel splits may not work correctly.");
}
query.append(inputQuery.replace(DataDrivenDBInputFormat.SUBSTITUTE_TOKEN,
conditionClauses.toString()));
}
LOG.debug("Using query: " + query.toString());
return query.toString();
}
}
| DataDrivenDBRecordReader |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/SortArgs.java | {
"start": 731,
"end": 1026
} | class ____ implements CompositeArgument {
private String by;
private Limit limit = Limit.unlimited();
private List<String> get;
private CommandKeyword order;
private boolean alpha;
/**
* Builder entry points for {@link SortArgs}.
*/
public static | SortArgs |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/common/processor/src/main/java/org/jboss/resteasy/reactive/common/processor/scanning/ResourceScanningResult.java | {
"start": 285,
"end": 2935
} | class ____ {
private final IndexView index;
final Map<DotName, ClassInfo> scannedResources;
final Map<DotName, String> scannedResourcePaths;
final Map<DotName, ClassInfo> possibleSubResources;
final Map<DotName, String> pathInterfaces;
final Map<DotName, String> clientInterfaces;
final Map<DotName, MethodInfo> resourcesThatNeedCustomProducer;
final Map<DotName, String> httpAnnotationToMethod;
final List<MethodInfo> classLevelExceptionMappers;
final Set<DotName> requestScopedResources;
public ResourceScanningResult(IndexView index, Map<DotName, ClassInfo> scannedResources,
Map<DotName, String> scannedResourcePaths,
Map<DotName, ClassInfo> possibleSubResources, Map<DotName, String> pathInterfaces,
Map<DotName, String> clientInterfaces,
Map<DotName, MethodInfo> resourcesThatNeedCustomProducer,
Map<DotName, String> httpAnnotationToMethod, List<MethodInfo> classLevelExceptionMappers,
Set<DotName> requestScopedResources) {
this.index = index;
this.scannedResources = scannedResources;
this.scannedResourcePaths = scannedResourcePaths;
this.possibleSubResources = possibleSubResources;
this.pathInterfaces = pathInterfaces;
this.clientInterfaces = clientInterfaces;
this.resourcesThatNeedCustomProducer = resourcesThatNeedCustomProducer;
this.httpAnnotationToMethod = httpAnnotationToMethod;
this.classLevelExceptionMappers = classLevelExceptionMappers;
this.requestScopedResources = requestScopedResources;
}
public IndexView getIndex() {
return index;
}
public Map<DotName, ClassInfo> getScannedResources() {
return scannedResources;
}
public Map<DotName, String> getScannedResourcePaths() {
return scannedResourcePaths;
}
public Map<DotName, ClassInfo> getPossibleSubResources() {
return possibleSubResources;
}
public Map<DotName, String> getPathInterfaces() {
return pathInterfaces;
}
public Map<DotName, String> getClientInterfaces() {
return clientInterfaces;
}
public Map<DotName, MethodInfo> getResourcesThatNeedCustomProducer() {
return resourcesThatNeedCustomProducer;
}
public Map<DotName, String> getHttpAnnotationToMethod() {
return httpAnnotationToMethod;
}
public List<MethodInfo> getClassLevelExceptionMappers() {
return classLevelExceptionMappers;
}
public Set<DotName> getRequestScopedResources() {
return requestScopedResources;
}
}
| ResourceScanningResult |
java | quarkusio__quarkus | extensions/smallrye-health/runtime/src/main/java/io/quarkus/smallrye/health/runtime/SmallRyeLivenessHandler.java | {
"start": 216,
"end": 448
} | class ____ extends SmallRyeHealthHandlerBase {
@Override
protected Uni<SmallRyeHealth> getHealth(SmallRyeHealthReporter reporter, RoutingContext ctx) {
return reporter.getLivenessAsync();
}
}
| SmallRyeLivenessHandler |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/reflect/FieldUtils.java | {
"start": 3358,
"end": 3896
} | class ____ {@code null}.
* @throws IllegalArgumentException
* if the field name is {@code null}, blank, or empty.
* @throws SecurityException if an underlying accessible object's method denies the request.
* @see SecurityManager#checkPermission
*/
public static Field getDeclaredField(final Class<?> cls, final String fieldName) {
return getDeclaredField(cls, fieldName, false);
}
/**
* Gets an accessible {@link Field} by name, breaking scope if requested. Only the specified | is |
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-sample/src/main/java/org/springframework/cloud/gateway/sample/GatewaySampleApplication.java | {
"start": 6572,
"end": 6817
} | class ____ {
String message;
Hello() {
}
Hello(String message) {
this.message = message;
}
public String getMessage() {
return message;
}
public void setMessage(String message) {
this.message = message;
}
}
}
| Hello |
java | apache__flink | flink-state-backends/flink-statebackend-forst/src/test/java/org/apache/flink/state/forst/ForStNativeMetricMonitorTest.java | {
"start": 8289,
"end": 10134
} | class ____ implements MetricRegistry {
List<ForStNativeMetricMonitor.ForStNativePropertyMetricView> propertyMetrics =
new ArrayList<>();
List<ForStNativeMetricMonitor.ForStNativeStatisticsMetricView> statisticsMetrics =
new ArrayList<>();
@Override
public char getDelimiter() {
return 0;
}
@Override
public int getNumberReporters() {
return 0;
}
@Override
public void addSpan(SpanBuilder spanBuilder, AbstractMetricGroup<?> group) {}
@Override
public void register(Metric metric, String metricName, AbstractMetricGroup group) {
if (metric instanceof ForStNativeMetricMonitor.ForStNativePropertyMetricView) {
propertyMetrics.add(
(ForStNativeMetricMonitor.ForStNativePropertyMetricView) metric);
} else if (metric instanceof ForStNativeMetricMonitor.ForStNativeStatisticsMetricView) {
statisticsMetrics.add(
(ForStNativeMetricMonitor.ForStNativeStatisticsMetricView) metric);
}
}
@Override
public void unregister(Metric metric, String metricName, AbstractMetricGroup group) {}
@Override
public void addEvent(EventBuilder eventBuilder, AbstractMetricGroup<?> group) {}
@Override
public ScopeFormats getScopeFormats() {
Configuration config = new Configuration();
config.set(MetricOptions.SCOPE_NAMING_TM, "A");
config.set(MetricOptions.SCOPE_NAMING_TM_JOB, "B");
config.set(MetricOptions.SCOPE_NAMING_TASK, "C");
config.set(MetricOptions.SCOPE_NAMING_OPERATOR, "D");
return ScopeFormats.fromConfig(config);
}
}
}
| SimpleMetricRegistry |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/injection/erroneous/interceptorBean/InterceptorBeanInjectionInitializerTest.java | {
"start": 912,
"end": 1023
} | class ____ {
@Inject
void initMethod(Interceptor<MyBean> interceptor) {
}
}
}
| MyBean |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/util/AssertTests.java | {
"start": 15965,
"end": 17117
} | class ____.lang.String");
}
@Test
void isInstanceOfWithTypeMismatchAndCustomMessageWithSpace() {
assertThatIllegalArgumentException().isThrownBy(() ->
Assert.isInstanceOf(String.class, 42L, "Custom message for "))
.withMessageContaining("Custom message for java.lang.Long");
}
@Test
void isInstanceOfWithMessageSupplier() {
Assert.isInstanceOf(String.class, "foo", () -> "enigma");
}
@Test
void isInstanceOfWithNullTypeAndMessageSupplier() {
assertThatIllegalArgumentException().isThrownBy(() ->
Assert.isInstanceOf(null, "foo", () -> "enigma"))
.withMessageContaining("Type to check against must not be null");
}
@Test
void isInstanceOfWithNullInstanceAndMessageSupplier() {
assertThatIllegalArgumentException().isThrownBy(() ->
Assert.isInstanceOf(String.class, null, () -> "enigma"))
.withMessageContaining("enigma: null");
}
@Test
void isInstanceOfWithTypeMismatchAndNullMessageSupplier() {
assertThatIllegalArgumentException().isThrownBy(() ->
Assert.isInstanceOf(String.class, 42L, (Supplier<String>) null))
.withMessageContaining("Object of class [java.lang.Long] must be an instance of | java |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/reflection/Administration.java | {
"start": 630,
"end": 1640
} | class ____ extends Organization {
@Id
private Integer id;
private String firstname;
private String lastname;
private String address;
private Integer version;
@Basic
private String transientField;
@OneToOne
@JoinColumns({@JoinColumn(name = "busNumber_fk"), @JoinColumn(name = "busDriver_fk")})
private BusTrip defaultBusTrip;
public String getAddress() {
return address;
}
public void setAddress(String address) {
this.address = address;
}
public Integer getVersion() {
return version;
}
public void setVersion(Integer version) {
this.version = version;
}
public String getFirstname() {
return firstname;
}
public void setFirstname(String firstname) {
this.firstname = firstname;
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getLastname() {
return lastname;
}
public void setLastname(String lastname) {
this.lastname = lastname;
}
@PostLoad
public void calculate() {
//...
}
}
| Administration |
java | elastic__elasticsearch | modules/lang-painless/src/test/java/org/elasticsearch/painless/DivisionTests.java | {
"start": 595,
"end": 23636
} | class ____ extends ScriptTestCase {
// TODO: byte,short,char
public void testBasics() throws Exception {
assertEquals(2.25F / 1.5F, exec("return 2.25F / 1.5F;"));
assertEquals(0.5, exec("double x = 1; float y = 2; return x / y;"));
}
public void testInt() throws Exception {
assertEquals(1 / 1, exec("int x = 1; int y = 1; return x/y;"));
assertEquals(2 / 3, exec("int x = 2; int y = 3; return x/y;"));
assertEquals(5 / 10, exec("int x = 5; int y = 10; return x/y;"));
assertEquals(10 / 1 / 2, exec("int x = 10; int y = 1; int z = 2; return x/y/z;"));
assertEquals((10 / 1) / 2, exec("int x = 10; int y = 1; int z = 2; return (x/y)/z;"));
assertEquals(10 / (4 / 2), exec("int x = 10; int y = 4; int z = 2; return x/(y/z);"));
assertEquals(10 / 1, exec("int x = 10; int y = 1; return x/y;"));
assertEquals(0 / 1, exec("int x = 0; int y = 1; return x/y;"));
}
public void testIntConst() throws Exception {
assertEquals(1 / 1, exec("return 1/1;"));
assertEquals(2 / 3, exec("return 2/3;"));
assertEquals(5 / 10, exec("return 5/10;"));
assertEquals(10 / 1 / 2, exec("return 10/1/2;"));
assertEquals((10 / 1) / 2, exec("return (10/1)/2;"));
assertEquals(10 / (4 / 2), exec("return 10/(4/2);"));
assertEquals(10 / 1, exec("return 10/1;"));
assertEquals(0 / 1, exec("return 0/1;"));
}
public void testLong() throws Exception {
assertEquals(1L / 1L, exec("long x = 1; long y = 1; return x/y;"));
assertEquals(2L / 3L, exec("long x = 2; long y = 3; return x/y;"));
assertEquals(5L / 10L, exec("long x = 5; long y = 10; return x/y;"));
assertEquals(10L / 1L / 2L, exec("long x = 10; long y = 1; long z = 2; return x/y/z;"));
assertEquals((10L / 1L) / 2L, exec("long x = 10; long y = 1; long z = 2; return (x/y)/z;"));
assertEquals(10L / (4L / 2L), exec("long x = 10; long y = 4; long z = 2; return x/(y/z);"));
assertEquals(10L / 1L, exec("long x = 10; long y = 1; return x/y;"));
assertEquals(0L / 1L, exec("long x = 0; long y = 1; return x/y;"));
}
public void testLongConst() throws Exception {
assertEquals(1L / 1L, exec("return 1L/1L;"));
assertEquals(2L / 3L, exec("return 2L/3L;"));
assertEquals(5L / 10L, exec("return 5L/10L;"));
assertEquals(10L / 1L / 2L, exec("return 10L/1L/2L;"));
assertEquals((10L / 1L) / 2L, exec("return (10L/1L)/2L;"));
assertEquals(10L / (4L / 2L), exec("return 10L/(4L/2L);"));
assertEquals(10L / 1L, exec("return 10L/1L;"));
assertEquals(0L / 1L, exec("return 0L/1L;"));
}
public void testFloat() throws Exception {
assertEquals(1F / 1F, exec("float x = 1; float y = 1; return x/y;"));
assertEquals(2F / 3F, exec("float x = 2; float y = 3; return x/y;"));
assertEquals(5F / 10F, exec("float x = 5; float y = 10; return x/y;"));
assertEquals(10F / 1F / 2F, exec("float x = 10; float y = 1; float z = 2; return x/y/z;"));
assertEquals((10F / 1F) / 2F, exec("float x = 10; float y = 1; float z = 2; return (x/y)/z;"));
assertEquals(10F / (4F / 2F), exec("float x = 10; float y = 4; float z = 2; return x/(y/z);"));
assertEquals(10F / 1F, exec("float x = 10; float y = 1; return x/y;"));
assertEquals(0F / 1F, exec("float x = 0; float y = 1; return x/y;"));
}
public void testFloatConst() throws Exception {
assertEquals(1F / 1F, exec("return 1F/1F;"));
assertEquals(2F / 3F, exec("return 2F/3F;"));
assertEquals(5F / 10F, exec("return 5F/10F;"));
assertEquals(10F / 1F / 2F, exec("return 10F/1F/2F;"));
assertEquals((10F / 1F) / 2F, exec("return (10F/1F)/2F;"));
assertEquals(10F / (4F / 2F), exec("return 10F/(4F/2F);"));
assertEquals(10F / 1F, exec("return 10F/1F;"));
assertEquals(0F / 1F, exec("return 0F/1F;"));
}
public void testDouble() throws Exception {
assertEquals(1.0 / 1.0, exec("double x = 1; double y = 1; return x/y;"));
assertEquals(2.0 / 3.0, exec("double x = 2; double y = 3; return x/y;"));
assertEquals(5.0 / 10.0, exec("double x = 5; double y = 10; return x/y;"));
assertEquals(10.0 / 1.0 / 2.0, exec("double x = 10; double y = 1; double z = 2; return x/y/z;"));
assertEquals((10.0 / 1.0) / 2.0, exec("double x = 10; double y = 1; double z = 2; return (x/y)/z;"));
assertEquals(10.0 / (4.0 / 2.0), exec("double x = 10; double y = 4; double z = 2; return x/(y/z);"));
assertEquals(10.0 / 1.0, exec("double x = 10; double y = 1; return x/y;"));
assertEquals(0.0 / 1.0, exec("double x = 0; double y = 1; return x/y;"));
}
public void testDoubleConst() throws Exception {
assertEquals(1.0 / 1.0, exec("return 1.0/1.0;"));
assertEquals(2.0 / 3.0, exec("return 2.0/3.0;"));
assertEquals(5.0 / 10.0, exec("return 5.0/10.0;"));
assertEquals(10.0 / 1.0 / 2.0, exec("return 10.0/1.0/2.0;"));
assertEquals((10.0 / 1.0) / 2.0, exec("return (10.0/1.0)/2.0;"));
assertEquals(10.0 / (4.0 / 2.0), exec("return 10.0/(4.0/2.0);"));
assertEquals(10.0 / 1.0, exec("return 10.0/1.0;"));
assertEquals(0.0 / 1.0, exec("return 0.0/1.0;"));
}
public void testDivideByZero() throws Exception {
expectScriptThrows(ArithmeticException.class, () -> { exec("int x = 1; int y = 0; return x / y;"); });
expectScriptThrows(ArithmeticException.class, () -> { exec("long x = 1L; long y = 0L; return x / y;"); });
}
public void testDivideByZeroConst() throws Exception {
expectScriptThrows(ArithmeticException.class, () -> { exec("return 1/0;"); });
expectScriptThrows(ArithmeticException.class, () -> { exec("return 1L/0L;"); });
}
public void testDef() {
assertEquals(1, exec("def x = (byte)2; def y = (byte)2; return x / y"));
assertEquals(1, exec("def x = (short)2; def y = (byte)2; return x / y"));
assertEquals(1, exec("def x = (char)2; def y = (byte)2; return x / y"));
assertEquals(1, exec("def x = (int)2; def y = (byte)2; return x / y"));
assertEquals(1L, exec("def x = (long)2; def y = (byte)2; return x / y"));
assertEquals(1F, exec("def x = (float)2; def y = (byte)2; return x / y"));
assertEquals(1D, exec("def x = (double)2; def y = (byte)2; return x / y"));
assertEquals(1, exec("def x = (byte)2; def y = (short)2; return x / y"));
assertEquals(1, exec("def x = (short)2; def y = (short)2; return x / y"));
assertEquals(1, exec("def x = (char)2; def y = (short)2; return x / y"));
assertEquals(1, exec("def x = (int)2; def y = (short)2; return x / y"));
assertEquals(1L, exec("def x = (long)2; def y = (short)2; return x / y"));
assertEquals(1F, exec("def x = (float)2; def y = (short)2; return x / y"));
assertEquals(1D, exec("def x = (double)2; def y = (short)2; return x / y"));
assertEquals(1, exec("def x = (byte)2; def y = (char)2; return x / y"));
assertEquals(1, exec("def x = (short)2; def y = (char)2; return x / y"));
assertEquals(1, exec("def x = (char)2; def y = (char)2; return x / y"));
assertEquals(1, exec("def x = (int)2; def y = (char)2; return x / y"));
assertEquals(1L, exec("def x = (long)2; def y = (char)2; return x / y"));
assertEquals(1F, exec("def x = (float)2; def y = (char)2; return x / y"));
assertEquals(1D, exec("def x = (double)2; def y = (char)2; return x / y"));
assertEquals(1, exec("def x = (byte)2; def y = (int)2; return x / y"));
assertEquals(1, exec("def x = (short)2; def y = (int)2; return x / y"));
assertEquals(1, exec("def x = (char)2; def y = (int)2; return x / y"));
assertEquals(1, exec("def x = (int)2; def y = (int)2; return x / y"));
assertEquals(1L, exec("def x = (long)2; def y = (int)2; return x / y"));
assertEquals(1F, exec("def x = (float)2; def y = (int)2; return x / y"));
assertEquals(1D, exec("def x = (double)2; def y = (int)2; return x / y"));
assertEquals(1L, exec("def x = (byte)2; def y = (long)2; return x / y"));
assertEquals(1L, exec("def x = (short)2; def y = (long)2; return x / y"));
assertEquals(1L, exec("def x = (char)2; def y = (long)2; return x / y"));
assertEquals(1L, exec("def x = (int)2; def y = (long)2; return x / y"));
assertEquals(1L, exec("def x = (long)2; def y = (long)2; return x / y"));
assertEquals(1F, exec("def x = (float)2; def y = (long)2; return x / y"));
assertEquals(1D, exec("def x = (double)2; def y = (long)2; return x / y"));
assertEquals(1F, exec("def x = (byte)2; def y = (float)2; return x / y"));
assertEquals(1F, exec("def x = (short)2; def y = (float)2; return x / y"));
assertEquals(1F, exec("def x = (char)2; def y = (float)2; return x / y"));
assertEquals(1F, exec("def x = (int)2; def y = (float)2; return x / y"));
assertEquals(1F, exec("def x = (long)2; def y = (float)2; return x / y"));
assertEquals(1F, exec("def x = (float)2; def y = (float)2; return x / y"));
assertEquals(1D, exec("def x = (double)2; def y = (float)2; return x / y"));
assertEquals(1D, exec("def x = (byte)2; def y = (double)2; return x / y"));
assertEquals(1D, exec("def x = (short)2; def y = (double)2; return x / y"));
assertEquals(1D, exec("def x = (char)2; def y = (double)2; return x / y"));
assertEquals(1D, exec("def x = (int)2; def y = (double)2; return x / y"));
assertEquals(1D, exec("def x = (long)2; def y = (double)2; return x / y"));
assertEquals(1D, exec("def x = (float)2; def y = (double)2; return x / y"));
assertEquals(1D, exec("def x = (double)2; def y = (double)2; return x / y"));
assertEquals(1, exec("def x = (byte)2; def y = (byte)2; return x / y"));
assertEquals(1, exec("def x = (short)2; def y = (short)2; return x / y"));
assertEquals(1, exec("def x = (char)2; def y = (char)2; return x / y"));
assertEquals(1, exec("def x = (int)2; def y = (int)2; return x / y"));
assertEquals(1L, exec("def x = (long)2; def y = (long)2; return x / y"));
assertEquals(1F, exec("def x = (float)2; def y = (float)2; return x / y"));
assertEquals(1D, exec("def x = (double)2; def y = (double)2; return x / y"));
}
public void testDefTypedLHS() {
assertEquals(1, exec("byte x = (byte)2; def y = (byte)2; return x / y"));
assertEquals(1, exec("short x = (short)2; def y = (byte)2; return x / y"));
assertEquals(1, exec("char x = (char)2; def y = (byte)2; return x / y"));
assertEquals(1, exec("int x = (int)2; def y = (byte)2; return x / y"));
assertEquals(1L, exec("long x = (long)2; def y = (byte)2; return x / y"));
assertEquals(1F, exec("float x = (float)2; def y = (byte)2; return x / y"));
assertEquals(1D, exec("double x = (double)2; def y = (byte)2; return x / y"));
assertEquals(1, exec("byte x = (byte)2; def y = (short)2; return x / y"));
assertEquals(1, exec("short x = (short)2; def y = (short)2; return x / y"));
assertEquals(1, exec("char x = (char)2; def y = (short)2; return x / y"));
assertEquals(1, exec("int x = (int)2; def y = (short)2; return x / y"));
assertEquals(1L, exec("long x = (long)2; def y = (short)2; return x / y"));
assertEquals(1F, exec("float x = (float)2; def y = (short)2; return x / y"));
assertEquals(1D, exec("double x = (double)2; def y = (short)2; return x / y"));
assertEquals(1, exec("byte x = (byte)2; def y = (char)2; return x / y"));
assertEquals(1, exec("short x = (short)2; def y = (char)2; return x / y"));
assertEquals(1, exec("char x = (char)2; def y = (char)2; return x / y"));
assertEquals(1, exec("int x = (int)2; def y = (char)2; return x / y"));
assertEquals(1L, exec("long x = (long)2; def y = (char)2; return x / y"));
assertEquals(1F, exec("float x = (float)2; def y = (char)2; return x / y"));
assertEquals(1D, exec("double x = (double)2; def y = (char)2; return x / y"));
assertEquals(1, exec("byte x = (byte)2; def y = (int)2; return x / y"));
assertEquals(1, exec("short x = (short)2; def y = (int)2; return x / y"));
assertEquals(1, exec("char x = (char)2; def y = (int)2; return x / y"));
assertEquals(1, exec("int x = (int)2; def y = (int)2; return x / y"));
assertEquals(1L, exec("long x = (long)2; def y = (int)2; return x / y"));
assertEquals(1F, exec("float x = (float)2; def y = (int)2; return x / y"));
assertEquals(1D, exec("double x = (double)2; def y = (int)2; return x / y"));
assertEquals(1L, exec("byte x = (byte)2; def y = (long)2; return x / y"));
assertEquals(1L, exec("short x = (short)2; def y = (long)2; return x / y"));
assertEquals(1L, exec("char x = (char)2; def y = (long)2; return x / y"));
assertEquals(1L, exec("int x = (int)2; def y = (long)2; return x / y"));
assertEquals(1L, exec("long x = (long)2; def y = (long)2; return x / y"));
assertEquals(1F, exec("float x = (float)2; def y = (long)2; return x / y"));
assertEquals(1D, exec("double x = (double)2; def y = (long)2; return x / y"));
assertEquals(1F, exec("byte x = (byte)2; def y = (float)2; return x / y"));
assertEquals(1F, exec("short x = (short)2; def y = (float)2; return x / y"));
assertEquals(1F, exec("char x = (char)2; def y = (float)2; return x / y"));
assertEquals(1F, exec("int x = (int)2; def y = (float)2; return x / y"));
assertEquals(1F, exec("long x = (long)2; def y = (float)2; return x / y"));
assertEquals(1F, exec("float x = (float)2; def y = (float)2; return x / y"));
assertEquals(1D, exec("double x = (double)2; def y = (float)2; return x / y"));
assertEquals(1D, exec("byte x = (byte)2; def y = (double)2; return x / y"));
assertEquals(1D, exec("short x = (short)2; def y = (double)2; return x / y"));
assertEquals(1D, exec("char x = (char)2; def y = (double)2; return x / y"));
assertEquals(1D, exec("int x = (int)2; def y = (double)2; return x / y"));
assertEquals(1D, exec("long x = (long)2; def y = (double)2; return x / y"));
assertEquals(1D, exec("float x = (float)2; def y = (double)2; return x / y"));
assertEquals(1D, exec("double x = (double)2; def y = (double)2; return x / y"));
assertEquals(1, exec("byte x = (byte)2; def y = (byte)2; return x / y"));
assertEquals(1, exec("short x = (short)2; def y = (short)2; return x / y"));
assertEquals(1, exec("char x = (char)2; def y = (char)2; return x / y"));
assertEquals(1, exec("int x = (int)2; def y = (int)2; return x / y"));
assertEquals(1L, exec("long x = (long)2; def y = (long)2; return x / y"));
assertEquals(1F, exec("float x = (float)2; def y = (float)2; return x / y"));
assertEquals(1D, exec("double x = (double)2; def y = (double)2; return x / y"));
}
public void testDefTypedRHS() {
assertEquals(1, exec("def x = (byte)2; byte y = (byte)2; return x / y"));
assertEquals(1, exec("def x = (short)2; byte y = (byte)2; return x / y"));
assertEquals(1, exec("def x = (char)2; byte y = (byte)2; return x / y"));
assertEquals(1, exec("def x = (int)2; byte y = (byte)2; return x / y"));
assertEquals(1L, exec("def x = (long)2; byte y = (byte)2; return x / y"));
assertEquals(1F, exec("def x = (float)2; byte y = (byte)2; return x / y"));
assertEquals(1D, exec("def x = (double)2; byte y = (byte)2; return x / y"));
assertEquals(1, exec("def x = (byte)2; short y = (short)2; return x / y"));
assertEquals(1, exec("def x = (short)2; short y = (short)2; return x / y"));
assertEquals(1, exec("def x = (char)2; short y = (short)2; return x / y"));
assertEquals(1, exec("def x = (int)2; short y = (short)2; return x / y"));
assertEquals(1L, exec("def x = (long)2; short y = (short)2; return x / y"));
assertEquals(1F, exec("def x = (float)2; short y = (short)2; return x / y"));
assertEquals(1D, exec("def x = (double)2; short y = (short)2; return x / y"));
assertEquals(1, exec("def x = (byte)2; char y = (char)2; return x / y"));
assertEquals(1, exec("def x = (short)2; char y = (char)2; return x / y"));
assertEquals(1, exec("def x = (char)2; char y = (char)2; return x / y"));
assertEquals(1, exec("def x = (int)2; char y = (char)2; return x / y"));
assertEquals(1L, exec("def x = (long)2; char y = (char)2; return x / y"));
assertEquals(1F, exec("def x = (float)2; char y = (char)2; return x / y"));
assertEquals(1D, exec("def x = (double)2; char y = (char)2; return x / y"));
assertEquals(1, exec("def x = (byte)2; int y = (int)2; return x / y"));
assertEquals(1, exec("def x = (short)2; int y = (int)2; return x / y"));
assertEquals(1, exec("def x = (char)2; int y = (int)2; return x / y"));
assertEquals(1, exec("def x = (int)2; int y = (int)2; return x / y"));
assertEquals(1L, exec("def x = (long)2; int y = (int)2; return x / y"));
assertEquals(1F, exec("def x = (float)2; int y = (int)2; return x / y"));
assertEquals(1D, exec("def x = (double)2; int y = (int)2; return x / y"));
assertEquals(1L, exec("def x = (byte)2; long y = (long)2; return x / y"));
assertEquals(1L, exec("def x = (short)2; long y = (long)2; return x / y"));
assertEquals(1L, exec("def x = (char)2; long y = (long)2; return x / y"));
assertEquals(1L, exec("def x = (int)2; long y = (long)2; return x / y"));
assertEquals(1L, exec("def x = (long)2; long y = (long)2; return x / y"));
assertEquals(1F, exec("def x = (float)2; long y = (long)2; return x / y"));
assertEquals(1D, exec("def x = (double)2; long y = (long)2; return x / y"));
assertEquals(1F, exec("def x = (byte)2; float y = (float)2; return x / y"));
assertEquals(1F, exec("def x = (short)2; float y = (float)2; return x / y"));
assertEquals(1F, exec("def x = (char)2; float y = (float)2; return x / y"));
assertEquals(1F, exec("def x = (int)2; float y = (float)2; return x / y"));
assertEquals(1F, exec("def x = (long)2; float y = (float)2; return x / y"));
assertEquals(1F, exec("def x = (float)2; float y = (float)2; return x / y"));
assertEquals(1D, exec("def x = (double)2; float y = (float)2; return x / y"));
assertEquals(1D, exec("def x = (byte)2; double y = (double)2; return x / y"));
assertEquals(1D, exec("def x = (short)2; double y = (double)2; return x / y"));
assertEquals(1D, exec("def x = (char)2; double y = (double)2; return x / y"));
assertEquals(1D, exec("def x = (int)2; double y = (double)2; return x / y"));
assertEquals(1D, exec("def x = (long)2; double y = (double)2; return x / y"));
assertEquals(1D, exec("def x = (float)2; double y = (double)2; return x / y"));
assertEquals(1D, exec("def x = (double)2; double y = (double)2; return x / y"));
assertEquals(1, exec("def x = (byte)2; byte y = (byte)2; return x / y"));
assertEquals(1, exec("def x = (short)2; short y = (short)2; return x / y"));
assertEquals(1, exec("def x = (char)2; char y = (char)2; return x / y"));
assertEquals(1, exec("def x = (int)2; int y = (int)2; return x / y"));
assertEquals(1L, exec("def x = (long)2; long y = (long)2; return x / y"));
assertEquals(1F, exec("def x = (float)2; float y = (float)2; return x / y"));
assertEquals(1D, exec("def x = (double)2; double y = (double)2; return x / y"));
}
public void testCompoundAssignment() {
// byte
assertEquals((byte) 15, exec("byte x = 45; x /= 3; return x;"));
assertEquals((byte) -5, exec("byte x = 5; x /= -1; return x;"));
// short
assertEquals((short) 15, exec("short x = 45; x /= 3; return x;"));
assertEquals((short) -5, exec("short x = 5; x /= -1; return x;"));
// char
assertEquals((char) 15, exec("char x = 45; x /= 3; return x;"));
// int
assertEquals(15, exec("int x = 45; x /= 3; return x;"));
assertEquals(-5, exec("int x = 5; x /= -1; return x;"));
// long
assertEquals(15L, exec("long x = 45; x /= 3; return x;"));
assertEquals(-5L, exec("long x = 5; x /= -1; return x;"));
// float
assertEquals(15F, exec("float x = 45f; x /= 3; return x;"));
assertEquals(-5F, exec("float x = 5f; x /= -1; return x;"));
// double
assertEquals(15D, exec("double x = 45.0; x /= 3; return x;"));
assertEquals(-5D, exec("double x = 5.0; x /= -1; return x;"));
}
public void testDefCompoundAssignment() {
// byte
assertEquals((byte) 15, exec("def x = (byte)45; x /= 3; return x;"));
assertEquals((byte) -5, exec("def x = (byte)5; x /= -1; return x;"));
// short
assertEquals((short) 15, exec("def x = (short)45; x /= 3; return x;"));
assertEquals((short) -5, exec("def x = (short)5; x /= -1; return x;"));
// char
assertEquals((char) 15, exec("def x = (char)45; x /= 3; return x;"));
// int
assertEquals(15, exec("def x = 45; x /= 3; return x;"));
assertEquals(-5, exec("def x = 5; x /= -1; return x;"));
// long
assertEquals(15L, exec("def x = 45L; x /= 3; return x;"));
assertEquals(-5L, exec("def x = 5L; x /= -1; return x;"));
// float
assertEquals(15F, exec("def x = 45f; x /= 3; return x;"));
assertEquals(-5F, exec("def x = 5f; x /= -1; return x;"));
// double
assertEquals(15D, exec("def x = 45.0; x /= 3; return x;"));
assertEquals(-5D, exec("def x = 5.0; x /= -1; return x;"));
}
public void testCompoundAssignmentByZero() {
// byte
expectScriptThrows(ArithmeticException.class, () -> { exec("byte x = 1; x /= 0; return x;"); });
// short
expectScriptThrows(ArithmeticException.class, () -> { exec("short x = 1; x /= 0; return x;"); });
// char
expectScriptThrows(ArithmeticException.class, () -> { exec("char x = 1; x /= 0; return x;"); });
// int
expectScriptThrows(ArithmeticException.class, () -> { exec("int x = 1; x /= 0; return x;"); });
// long
expectScriptThrows(ArithmeticException.class, () -> { exec("long x = 1; x /= 0; return x;"); });
// def
expectScriptThrows(ArithmeticException.class, () -> { exec("def x = 1; x /= 0; return x;"); });
}
}
| DivisionTests |
java | dropwizard__dropwizard | dropwizard-health/src/main/java/io/dropwizard/health/HealthStateListener.java | {
"start": 71,
"end": 249
} | interface ____ extends EventListener, StateChangedCallback {
void onHealthyCheck(String healthCheckName);
void onUnhealthyCheck(String healthCheckName);
}
| HealthStateListener |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/legacy/Nameable.java | {
"start": 153,
"end": 291
} | interface ____ {
public String getName();
public void setName(String name);
public Long getKey();
public void setKey(Long key);
}
| Nameable |
java | spring-projects__spring-boot | buildSrc/src/main/java/org/springframework/boot/build/mavenplugin/PrepareMavenBinaries.java | {
"start": 1499,
"end": 2603
} | class ____ extends DefaultTask {
private final FileSystemOperations fileSystemOperations;
private final Provider<Set<FileTree>> binaries;
@Inject
public PrepareMavenBinaries(FileSystemOperations fileSystemOperations, ArchiveOperations archiveOperations) {
this.fileSystemOperations = fileSystemOperations;
ConfigurationContainer configurations = getProject().getConfigurations();
DependencyHandler dependencies = getProject().getDependencies();
this.binaries = getVersions().map((versions) -> versions.stream()
.map((version) -> configurations
.detachedConfiguration(dependencies.create("org.apache.maven:apache-maven:" + version + ":bin@zip")))
.map(Configuration::getSingleFile)
.map(archiveOperations::zipTree)
.collect(Collectors.toSet()));
}
@OutputDirectory
public abstract DirectoryProperty getOutputDir();
@Input
public abstract SetProperty<String> getVersions();
@TaskAction
public void prepareBinaries() {
this.fileSystemOperations.sync((sync) -> {
sync.into(getOutputDir());
this.binaries.get().forEach(sync::from);
});
}
}
| PrepareMavenBinaries |
java | mapstruct__mapstruct | integrationtest/src/test/resources/namingStrategyTest/usage/src/main/java/org/mapstruct/itest/naming/GolfPlayer.java | {
"start": 226,
"end": 636
} | class ____ {
private double handicap;
private String name;
public double handicap() {
return handicap;
}
public GolfPlayer withHandicap(double handicap) {
this.handicap = handicap;
return this;
}
public String name() {
return name;
}
public GolfPlayer withName(String name) {
this.name = name;
return this;
}
}
| GolfPlayer |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/eval/EvalMethodLeftTest.java | {
"start": 185,
"end": 391
} | class ____ extends TestCase {
public void test_ascii() throws Exception {
assertEquals("fooba", SQLEvalVisitorUtils.evalExpr(JdbcConstants.MYSQL, "LEFT('foobarbar', 5)"));
}
}
| EvalMethodLeftTest |
java | google__guava | android/guava/src/com/google/common/primitives/UnsignedInteger.java | {
"start": 1730,
"end": 8496
} | class ____ extends Number implements Comparable<UnsignedInteger> {
public static final UnsignedInteger ZERO = fromIntBits(0);
public static final UnsignedInteger ONE = fromIntBits(1);
public static final UnsignedInteger MAX_VALUE = fromIntBits(-1);
private final int value;
private UnsignedInteger(int value) {
// GWT doesn't consistently overflow values to make them 32-bit, so we need to force it.
this.value = value & 0xffffffff;
}
/**
* Returns an {@code UnsignedInteger} corresponding to a given bit representation. The argument is
* interpreted as an unsigned 32-bit value. Specifically, the sign bit of {@code bits} is
* interpreted as a normal bit, and all other bits are treated as usual.
*
* <p>If the argument is nonnegative, the returned result will be equal to {@code bits},
* otherwise, the result will be equal to {@code 2^32 + bits}.
*
* <p>To represent unsigned decimal constants, consider {@link #valueOf(long)} instead.
*
* @since 14.0
*/
public static UnsignedInteger fromIntBits(int bits) {
return new UnsignedInteger(bits);
}
/**
* Returns an {@code UnsignedInteger} that is equal to {@code value}, if possible. The inverse
* operation of {@link #longValue()}.
*/
public static UnsignedInteger valueOf(long value) {
checkArgument(
(value & INT_MASK) == value,
"value (%s) is outside the range for an unsigned integer value",
value);
return fromIntBits((int) value);
}
/**
* Returns a {@code UnsignedInteger} representing the same value as the specified {@link
* BigInteger}. This is the inverse operation of {@link #bigIntegerValue()}.
*
* @throws IllegalArgumentException if {@code value} is negative or {@code value >= 2^32}
*/
public static UnsignedInteger valueOf(BigInteger value) {
checkNotNull(value);
checkArgument(
value.signum() >= 0 && value.bitLength() <= Integer.SIZE,
"value (%s) is outside the range for an unsigned integer value",
value);
return fromIntBits(value.intValue());
}
/**
* Returns an {@code UnsignedInteger} holding the value of the specified {@code String}, parsed as
* an unsigned {@code int} value.
*
* @throws NumberFormatException if the string does not contain a parsable unsigned {@code int}
* value
*/
public static UnsignedInteger valueOf(String string) {
return valueOf(string, 10);
}
/**
* Returns an {@code UnsignedInteger} holding the value of the specified {@code String}, parsed as
* an unsigned {@code int} value in the specified radix.
*
* @throws NumberFormatException if the string does not contain a parsable unsigned {@code int}
* value
*/
public static UnsignedInteger valueOf(String string, int radix) {
return fromIntBits(UnsignedInts.parseUnsignedInt(string, radix));
}
/**
* Returns the result of adding this and {@code val}. If the result would have more than 32 bits,
* returns the low 32 bits of the result.
*
* @since 14.0
*/
public UnsignedInteger plus(UnsignedInteger val) {
return fromIntBits(this.value + checkNotNull(val).value);
}
/**
* Returns the result of subtracting this and {@code val}. If the result would be negative,
* returns the low 32 bits of the result.
*
* @since 14.0
*/
public UnsignedInteger minus(UnsignedInteger val) {
return fromIntBits(value - checkNotNull(val).value);
}
/**
* Returns the result of multiplying this and {@code val}. If the result would have more than 32
* bits, returns the low 32 bits of the result.
*
* @since 14.0
*/
@J2ktIncompatible
@GwtIncompatible // Does not truncate correctly
public UnsignedInteger times(UnsignedInteger val) {
// TODO(lowasser): make this GWT-compatible
return fromIntBits(value * checkNotNull(val).value);
}
/**
* Returns the result of dividing this by {@code val}.
*
* @throws ArithmeticException if {@code val} is zero
* @since 14.0
*/
public UnsignedInteger dividedBy(UnsignedInteger val) {
return fromIntBits(UnsignedInts.divide(value, checkNotNull(val).value));
}
/**
* Returns this mod {@code val}.
*
* @throws ArithmeticException if {@code val} is zero
* @since 14.0
*/
public UnsignedInteger mod(UnsignedInteger val) {
return fromIntBits(UnsignedInts.remainder(value, checkNotNull(val).value));
}
/**
* Returns the value of this {@code UnsignedInteger} as an {@code int}. This is an inverse
* operation to {@link #fromIntBits}.
*
* <p>Note that if this {@code UnsignedInteger} holds a value {@code >= 2^31}, the returned value
* will be equal to {@code this - 2^32}.
*/
@Override
public int intValue() {
return value;
}
/** Returns the value of this {@code UnsignedInteger} as a {@code long}. */
@Override
public long longValue() {
return toLong(value);
}
/**
* Returns the value of this {@code UnsignedInteger} as a {@code float}, analogous to a widening
* primitive conversion from {@code int} to {@code float}, and correctly rounded.
*/
@Override
public float floatValue() {
return longValue();
}
/**
* Returns the value of this {@code UnsignedInteger} as a {@code double}, analogous to a widening
* primitive conversion from {@code int} to {@code double}, and correctly rounded.
*/
@Override
public double doubleValue() {
return longValue();
}
/** Returns the value of this {@code UnsignedInteger} as a {@link BigInteger}. */
public BigInteger bigIntegerValue() {
return BigInteger.valueOf(longValue());
}
/**
* Compares this unsigned integer to another unsigned integer. Returns {@code 0} if they are
* equal, a negative number if {@code this < other}, and a positive number if {@code this >
* other}.
*/
@Override
public int compareTo(UnsignedInteger other) {
checkNotNull(other);
return compare(value, other.value);
}
@Override
public int hashCode() {
return value;
}
@Override
public boolean equals(@Nullable Object obj) {
if (obj instanceof UnsignedInteger) {
UnsignedInteger other = (UnsignedInteger) obj;
return value == other.value;
}
return false;
}
/** Returns a string representation of the {@code UnsignedInteger} value, in base 10. */
@Override
public String toString() {
return toString(10);
}
/**
* Returns a string representation of the {@code UnsignedInteger} value, in base {@code radix}. If
* {@code radix < Character.MIN_RADIX} or {@code radix > Character.MAX_RADIX}, the radix {@code
* 10} is used.
*/
public String toString(int radix) {
return UnsignedInts.toString(value, radix);
}
}
| UnsignedInteger |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/core/MaybeSource.java | {
"start": 1012,
"end": 1358
} | interface ____<@NonNull T> {
/**
* Subscribes the given {@link MaybeObserver} to this {@link MaybeSource} instance.
* @param observer the {@code MaybeObserver}, not {@code null}
* @throws NullPointerException if {@code observer} is {@code null}
*/
void subscribe(@NonNull MaybeObserver<? super T> observer);
}
| MaybeSource |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/context/junit4/SpringJUnit4ClassRunner.java | {
"start": 6968,
"end": 7347
} | class ____ be managed
*/
protected TestContextManager createTestContextManager(Class<?> clazz) {
return new TestContextManager(clazz);
}
/**
* Get the {@link TestContextManager} associated with this runner.
*/
protected final TestContextManager getTestContextManager() {
return this.testContextManager;
}
/**
* Return a description suitable for an ignored test | to |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/wall/spi/WallVisitorBase.java | {
"start": 139,
"end": 1927
} | class ____ implements WallVisitor {
protected final WallConfig config;
protected final WallProvider provider;
protected final List<Violation> violations = new ArrayList<Violation>();
protected boolean sqlModified;
protected boolean sqlEndOfComment;
protected List<WallUpdateCheckItem> updateCheckItems;
public WallVisitorBase(WallProvider provider) {
this.config = provider.getConfig();
this.provider = provider;
}
@Override
public boolean isSqlModified() {
return sqlModified;
}
@Override
public void setSqlModified(boolean sqlModified) {
this.sqlModified = sqlModified;
}
@Override
public WallProvider getProvider() {
return provider;
}
@Override
public WallConfig getConfig() {
return config;
}
public void addViolation(Violation violation) {
this.violations.add(violation);
}
@Override
public List<Violation> getViolations() {
return violations;
}
@Override
public boolean isSqlEndOfComment() {
return this.sqlEndOfComment;
}
@Override
public void setSqlEndOfComment(boolean sqlEndOfComment) {
this.sqlEndOfComment = sqlEndOfComment;
}
public void addWallUpdateCheckItem(WallUpdateCheckItem item) {
if (updateCheckItems == null) {
updateCheckItems = new ArrayList<WallUpdateCheckItem>();
}
updateCheckItems.add(item);
}
public List<WallUpdateCheckItem> getUpdateCheckItems() {
return updateCheckItems;
}
public boolean isDenyTable(String name) {
if (!config
.isTableCheck()) {
return false;
}
return !provider.checkDenyTable(name);
}
}
| WallVisitorBase |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/load/HttpException.java | {
"start": 444,
"end": 1469
} | class ____ extends IOException {
private static final long serialVersionUID = 1L;
public static final int UNKNOWN = -1;
private final int statusCode;
public HttpException(int statusCode) {
this("Http request failed", statusCode);
}
/**
* @deprecated You should always include a status code, default to {@link #UNKNOWN} if you can't
* come up with a reasonable one. This method will be removed in a future version.
*/
@Deprecated
public HttpException(String message) {
this(message, UNKNOWN);
}
public HttpException(String message, int statusCode) {
this(message, statusCode, null /*cause*/);
}
public HttpException(String message, int statusCode, @Nullable Throwable cause) {
super(message + ", status code: " + statusCode, cause);
this.statusCode = statusCode;
}
/**
* Returns the http status code, or {@link #UNKNOWN} if the request failed without providing a
* status code.
*/
public int getStatusCode() {
return statusCode;
}
}
| HttpException |
java | google__guice | core/test/com/google/inject/MethodInterceptionTest.java | {
"start": 21557,
"end": 21664
} | interface ____ extends Provider<String> {
String get(int shard);
}
public static | ShardedStringProvider |
java | spring-projects__spring-framework | spring-r2dbc/src/test/java/org/springframework/r2dbc/connection/SingleConnectionFactoryTests.java | {
"start": 1314,
"end": 4267
} | class ____ {
@Test
void shouldAllocateSameConnection() {
SingleConnectionFactory factory = new SingleConnectionFactory("r2dbc:h2:mem:///foo", false);
Mono<? extends Connection> cf1 = factory.create();
Mono<? extends Connection> cf2 = factory.create();
Connection c1 = cf1.block();
Connection c2 = cf2.block();
assertThat(c1).isSameAs(c2);
factory.destroy();
}
@Test
void shouldApplyAutoCommit() {
SingleConnectionFactory factory = new SingleConnectionFactory("r2dbc:h2:mem:///foo", false);
factory.setAutoCommit(false);
factory.create().as(StepVerifier::create)
.consumeNextWith(actual -> assertThat(actual.isAutoCommit()).isFalse())
.verifyComplete();
factory.setAutoCommit(true);
factory.create().as(StepVerifier::create)
.consumeNextWith(actual -> assertThat(actual.isAutoCommit()).isTrue())
.verifyComplete();
factory.destroy();
}
@Test
@SuppressWarnings("rawtypes")
void shouldSuppressClose() {
SingleConnectionFactory factory = new SingleConnectionFactory("r2dbc:h2:mem:///foo", true);
Connection connection = factory.create().block();
StepVerifier.create(connection.close()).verifyComplete();
assertThat(connection).isInstanceOf(Wrapped.class);
assertThat(((Wrapped) connection).unwrap()).isInstanceOf(H2Connection.class);
StepVerifier.create(
connection.setTransactionIsolationLevel(IsolationLevel.READ_COMMITTED))
.verifyComplete();
factory.destroy();
}
@Test
void shouldNotSuppressClose() {
SingleConnectionFactory factory = new SingleConnectionFactory("r2dbc:h2:mem:///foo", false);
Connection connection = factory.create().block();
StepVerifier.create(connection.close()).verifyComplete();
StepVerifier.create(connection.setTransactionIsolationLevel(IsolationLevel.READ_COMMITTED))
.verifyError(R2dbcNonTransientResourceException.class);
factory.destroy();
}
@Test
void releaseConnectionShouldNotCloseConnection() {
Connection connectionMock = mock();
ConnectionFactoryMetadata metadata = mock();
SingleConnectionFactory factory = new SingleConnectionFactory(connectionMock, metadata, true);
Connection connection = factory.create().block();
ConnectionFactoryUtils.releaseConnection(connection, factory)
.as(StepVerifier::create)
.verifyComplete();
verify(connectionMock, never()).close();
}
@Test
void releaseConnectionShouldCloseUnrelatedConnection() {
Connection connectionMock = mock();
Connection otherConnection = mock();
ConnectionFactoryMetadata metadata = mock();
when(otherConnection.close()).thenReturn(Mono.empty());
SingleConnectionFactory factory = new SingleConnectionFactory(connectionMock, metadata, false);
factory.create().as(StepVerifier::create).expectNextCount(1).verifyComplete();
ConnectionFactoryUtils.releaseConnection(otherConnection, factory)
.as(StepVerifier::create)
.verifyComplete();
verify(otherConnection).close();
}
}
| SingleConnectionFactoryTests |
java | micronaut-projects__micronaut-core | core/src/main/java/io/micronaut/core/annotation/AnnotationValue.java | {
"start": 10924,
"end": 12656
} | class ____ {
* ...
* }</pre>
*
* <p>You can use this method to resolve the values of the {@code PropertySource} annotation such that the following assertion is true:</p>
*
* <pre class="code">
* annotationValue.getProperties("value") == [one:1, two:2]
* </pre>
*
* @param member The member
* @return The properties as an immutable map.
*/
@NonNull
public Map<String, String> getProperties(@NonNull String member) {
return getProperties(member, "name");
}
/**
* Resolve properties with a custom key member.
*
* @param member The member to resolve the properties from
* @param keyMember The member of the sub annotation that represents the key.
* @return The properties.
* @see #getProperties(String)
*/
public Map<String, String> getProperties(@NonNull String member, String keyMember) {
ArgumentUtils.requireNonNull("keyMember", keyMember);
if (StringUtils.isEmpty(member)) {
return Collections.emptyMap();
}
List<AnnotationValue<Annotation>> values = getAnnotations(member);
if (CollectionUtils.isEmpty(values)) {
return Collections.emptyMap();
}
Map<String, String> props = CollectionUtils.newLinkedHashMap(values.size());
for (AnnotationValue<Annotation> av : values) {
String name = av.stringValue(keyMember).orElse(null);
if (StringUtils.isNotEmpty(name)) {
av.stringValue(AnnotationMetadata.VALUE_MEMBER, valueMapper).ifPresent(v -> props.put(name, v));
}
}
return Collections.unmodifiableMap(props);
}
/**
* Return the | MyBean |
java | apache__rocketmq | broker/src/test/java/org/apache/rocketmq/broker/metrics/BrokerMetricsManagerTest.java | {
"start": 1878,
"end": 14700
} | class ____ {
private BrokerMetricsManager createTestBrokerMetricsManager() {
MessageStoreConfig messageStoreConfig = new MessageStoreConfig();
String storePathRootDir = System.getProperty("java.io.tmpdir") + File.separator + "store-"
+ UUID.randomUUID();
messageStoreConfig.setStorePathRootDir(storePathRootDir);
BrokerConfig brokerConfig = new BrokerConfig();
NettyServerConfig nettyServerConfig = new NettyServerConfig();
nettyServerConfig.setListenPort(0);
BrokerController brokerController = new BrokerController(brokerConfig, nettyServerConfig,
new NettyClientConfig(), messageStoreConfig);
return new BrokerMetricsManager(brokerController);
}
@Test
public void testNewAttributesBuilder() {
BrokerMetricsManager metricsManager = createTestBrokerMetricsManager();
Attributes attributes = metricsManager.newAttributesBuilder().put("a", "b")
.build();
assertThat(attributes.get(AttributeKey.stringKey("a"))).isEqualTo("b");
}
@Test
public void testCustomizedAttributesBuilder() {
BrokerMetricsManager metricsManager = createTestBrokerMetricsManager();
// Create a custom attributes builder supplier for testing
metricsManager.setAttributesBuilderSupplier(() -> new AttributesBuilder() {
private AttributesBuilder attributesBuilder = Attributes.builder();
@Override
public Attributes build() {
return attributesBuilder.put("customized", "value").build();
}
@Override
public <T> AttributesBuilder put(AttributeKey<Long> key, int value) {
attributesBuilder.put(key, value);
return this;
}
@Override
public <T> AttributesBuilder put(AttributeKey<T> key, T value) {
attributesBuilder.put(key, value);
return this;
}
@Override
public AttributesBuilder putAll(Attributes attributes) {
attributesBuilder.putAll(attributes);
return this;
}
});
Attributes attributes = metricsManager.newAttributesBuilder().put("a", "b")
.build();
assertThat(attributes.get(AttributeKey.stringKey("a"))).isEqualTo("b");
assertThat(attributes.get(AttributeKey.stringKey("customized"))).isEqualTo("value");
}
@Test
public void testIsRetryOrDlqTopicWithRetryTopic() {
String topic = MixAll.RETRY_GROUP_TOPIC_PREFIX + "TestTopic";
boolean result = BrokerMetricsManager.isRetryOrDlqTopic(topic);
assertThat(result).isTrue();
}
@Test
public void testIsRetryOrDlqTopicWithDlqTopic() {
String topic = MixAll.DLQ_GROUP_TOPIC_PREFIX + "TestTopic";
boolean result = BrokerMetricsManager.isRetryOrDlqTopic(topic);
assertThat(result).isTrue();
}
@Test
public void testIsRetryOrDlqTopicWithNonRetryOrDlqTopic() {
String topic = "NormalTopic";
boolean result = BrokerMetricsManager.isRetryOrDlqTopic(topic);
assertThat(result).isFalse();
}
@Test
public void testIsRetryOrDlqTopicWithEmptyTopic() {
String topic = "";
boolean result = BrokerMetricsManager.isRetryOrDlqTopic(topic);
assertThat(result).isFalse();
}
@Test
public void testIsRetryOrDlqTopicWithNullTopic() {
String topic = null;
boolean result = BrokerMetricsManager.isRetryOrDlqTopic(topic);
assertThat(result).isFalse();
}
@Test
public void testIsSystemGroup_SystemGroup_ReturnsTrue() {
String group = "FooGroup";
String systemGroup = MixAll.CID_RMQ_SYS_PREFIX + group;
boolean result = BrokerMetricsManager.isSystemGroup(systemGroup);
assertThat(result).isTrue();
}
@Test
public void testIsSystemGroup_NonSystemGroup_ReturnsFalse() {
String group = "FooGroup";
boolean result = BrokerMetricsManager.isSystemGroup(group);
assertThat(result).isFalse();
}
@Test
public void testIsSystemGroup_EmptyGroup_ReturnsFalse() {
String group = "";
boolean result = BrokerMetricsManager.isSystemGroup(group);
assertThat(result).isFalse();
}
@Test
public void testIsSystemGroup_NullGroup_ReturnsFalse() {
String group = null;
boolean result = BrokerMetricsManager.isSystemGroup(group);
assertThat(result).isFalse();
}
@Test
public void testIsSystem_SystemTopicOrSystemGroup_ReturnsTrue() {
String topic = "FooTopic";
String group = "FooGroup";
String systemTopic = TopicValidator.RMQ_SYS_TRANS_HALF_TOPIC;
String systemGroup = MixAll.CID_RMQ_SYS_PREFIX + group;
boolean resultTopic = BrokerMetricsManager.isSystem(systemTopic, group);
assertThat(resultTopic).isTrue();
boolean resultGroup = BrokerMetricsManager.isSystem(topic, systemGroup);
assertThat(resultGroup).isTrue();
}
@Test
public void testIsSystem_NonSystemTopicAndGroup_ReturnsFalse() {
String topic = "FooTopic";
String group = "FooGroup";
boolean result = BrokerMetricsManager.isSystem(topic, group);
assertThat(result).isFalse();
}
@Test
public void testIsSystem_EmptyTopicAndGroup_ReturnsFalse() {
String topic = "";
String group = "";
boolean result = BrokerMetricsManager.isSystem(topic, group);
assertThat(result).isFalse();
}
@Test
public void testGetMessageTypeAsNormal() {
SendMessageRequestHeader requestHeader = new SendMessageRequestHeader();
requestHeader.setProperties("");
TopicMessageType result = BrokerMetricsManager.getMessageType(requestHeader);
assertThat(TopicMessageType.NORMAL).isEqualTo(result);
}
@Test
public void testGetMessageTypeAsTransaction() {
SendMessageRequestHeader requestHeader = new SendMessageRequestHeader();
Map<String, String> map = new HashMap<>();
map.put(MessageConst.PROPERTY_TRANSACTION_PREPARED, "true");
requestHeader.setProperties(MessageDecoder.messageProperties2String(map));
TopicMessageType result = BrokerMetricsManager.getMessageType(requestHeader);
assertThat(TopicMessageType.TRANSACTION).isEqualTo(result);
}
@Test
public void testGetMessageTypeAsFifo() {
SendMessageRequestHeader requestHeader = new SendMessageRequestHeader();
Map<String, String> map = new HashMap<>();
map.put(MessageConst.PROPERTY_SHARDING_KEY, "shardingKey");
requestHeader.setProperties(MessageDecoder.messageProperties2String(map));
TopicMessageType result = BrokerMetricsManager.getMessageType(requestHeader);
assertThat(TopicMessageType.FIFO).isEqualTo(result);
}
@Test
public void testGetMessageTypeAsDelayLevel() {
SendMessageRequestHeader requestHeader = new SendMessageRequestHeader();
Map<String, String> map = new HashMap<>();
map.put(MessageConst.PROPERTY_DELAY_TIME_LEVEL, "1");
requestHeader.setProperties(MessageDecoder.messageProperties2String(map));
TopicMessageType result = BrokerMetricsManager.getMessageType(requestHeader);
assertThat(TopicMessageType.DELAY).isEqualTo(result);
}
@Test
public void testGetMessageTypeAsDeliverMS() {
SendMessageRequestHeader requestHeader = new SendMessageRequestHeader();
Map<String, String> map = new HashMap<>();
map.put(MessageConst.PROPERTY_TIMER_DELIVER_MS, "10");
requestHeader.setProperties(MessageDecoder.messageProperties2String(map));
TopicMessageType result = BrokerMetricsManager.getMessageType(requestHeader);
assertThat(TopicMessageType.DELAY).isEqualTo(result);
}
@Test
public void testGetMessageTypeAsDelaySEC() {
SendMessageRequestHeader requestHeader = new SendMessageRequestHeader();
Map<String, String> map = new HashMap<>();
map.put(MessageConst.PROPERTY_TIMER_DELAY_SEC, "1");
requestHeader.setProperties(MessageDecoder.messageProperties2String(map));
TopicMessageType result = BrokerMetricsManager.getMessageType(requestHeader);
assertThat(TopicMessageType.DELAY).isEqualTo(result);
}
@Test
public void testGetMessageTypeAsDelayMS() {
SendMessageRequestHeader requestHeader = new SendMessageRequestHeader();
Map<String, String> map = new HashMap<>();
map.put(MessageConst.PROPERTY_TIMER_DELAY_MS, "10");
requestHeader.setProperties(MessageDecoder.messageProperties2String(map));
TopicMessageType result = BrokerMetricsManager.getMessageType(requestHeader);
assertThat(TopicMessageType.DELAY).isEqualTo(result);
}
@Test
public void testGetMessageTypeWithUnknownProperty() {
SendMessageRequestHeader requestHeader = new SendMessageRequestHeader();
Map<String, String> map = new HashMap<>();
map.put("unknownProperty", "unknownValue");
requestHeader.setProperties(MessageDecoder.messageProperties2String(map));
TopicMessageType result = BrokerMetricsManager.getMessageType(requestHeader);
assertThat(TopicMessageType.NORMAL).isEqualTo(result);
}
@Test
public void testGetMessageTypeWithMultipleProperties() {
SendMessageRequestHeader requestHeader = new SendMessageRequestHeader();
Map<String, String> map = new HashMap<>();
map.put(MessageConst.PROPERTY_DELAY_TIME_LEVEL, "1");
map.put(MessageConst.PROPERTY_SHARDING_KEY, "shardingKey");
requestHeader.setProperties(MessageDecoder.messageProperties2String(map));
TopicMessageType result = BrokerMetricsManager.getMessageType(requestHeader);
assertThat(TopicMessageType.FIFO).isEqualTo(result);
}
@Test
public void testGetMessageTypeWithTransactionFlagButOtherPropertiesPresent() {
SendMessageRequestHeader requestHeader = new SendMessageRequestHeader();
Map<String, String> map = new HashMap<>();
map.put(MessageConst.PROPERTY_TRANSACTION_PREPARED, "true");
map.put(MessageConst.PROPERTY_SHARDING_KEY, "shardingKey");
requestHeader.setProperties(MessageDecoder.messageProperties2String(map));
TopicMessageType result = BrokerMetricsManager.getMessageType(requestHeader);
assertThat(TopicMessageType.TRANSACTION).isEqualTo(result);
}
@Test
public void testGetMessageTypeWithEmptyProperties() {
TopicMessageType result = BrokerMetricsManager.getMessageType(new SendMessageRequestHeader());
assertThat(TopicMessageType.NORMAL).isEqualTo(result);
}
@Test
public void testCreateMetricsManager() {
MessageStoreConfig messageStoreConfig = new MessageStoreConfig();
String storePathRootDir = System.getProperty("java.io.tmpdir") + File.separator + "store-"
+ UUID.randomUUID();
messageStoreConfig.setStorePathRootDir(storePathRootDir);
BrokerConfig brokerConfig = new BrokerConfig();
NettyServerConfig nettyServerConfig = new NettyServerConfig();
nettyServerConfig.setListenPort(0);
BrokerController brokerController = new BrokerController(brokerConfig, nettyServerConfig,
new NettyClientConfig(), messageStoreConfig);
BrokerMetricsManager metricsManager = new BrokerMetricsManager(brokerController);
assertThat(metricsManager.getBrokerMeter()).isNull();
}
@Test
public void testCreateMetricsManagerLogType() throws CloneNotSupportedException {
BrokerConfig brokerConfig = new BrokerConfig();
brokerConfig.setMetricsExporterType(MetricsExporterType.LOG);
brokerConfig.setMetricsLabel("label1:value1;label2:value2");
brokerConfig.setMetricsOtelCardinalityLimit(1);
MessageStoreConfig messageStoreConfig = new MessageStoreConfig();
String storePathRootDir = System.getProperty("java.io.tmpdir") + File.separator + "store-"
+ UUID.randomUUID();
messageStoreConfig.setStorePathRootDir(storePathRootDir);
NettyServerConfig nettyServerConfig = new NettyServerConfig();
nettyServerConfig.setListenPort(0);
BrokerController brokerController = new BrokerController(brokerConfig, nettyServerConfig,
new NettyClientConfig(), messageStoreConfig);
brokerController.initialize();
BrokerMetricsManager metricsManager = new BrokerMetricsManager(brokerController);
assertThat(metricsManager.getBrokerMeter()).isNotNull();
}
} | BrokerMetricsManagerTest |
java | apache__dubbo | dubbo-rpc/dubbo-rpc-api/src/main/java/org/apache/dubbo/rpc/InvocationProfilerUtils.java | {
"start": 1041,
"end": 3042
} | class ____ {
public static void enterSimpleProfiler(Invocation invocation, Callable<String> messageCallable) {
if (ProfilerSwitch.isEnableSimpleProfiler()) {
enterProfiler(invocation, messageCallable);
}
}
public static void releaseSimpleProfiler(Invocation invocation) {
if (ProfilerSwitch.isEnableSimpleProfiler()) {
releaseProfiler(invocation);
}
}
public static void enterDetailProfiler(Invocation invocation, Callable<String> messageCallable) {
if (ProfilerSwitch.isEnableDetailProfiler()) {
enterProfiler(invocation, messageCallable);
}
}
public static void releaseDetailProfiler(Invocation invocation) {
if (ProfilerSwitch.isEnableDetailProfiler()) {
releaseProfiler(invocation);
}
}
public static void enterProfiler(Invocation invocation, String message) {
Object fromInvocation = invocation.get(Profiler.PROFILER_KEY);
if (fromInvocation instanceof ProfilerEntry) {
invocation.put(Profiler.PROFILER_KEY, Profiler.enter((ProfilerEntry) fromInvocation, message));
}
}
public static void enterProfiler(Invocation invocation, Callable<String> messageCallable) {
Object fromInvocation = invocation.get(Profiler.PROFILER_KEY);
if (fromInvocation instanceof ProfilerEntry) {
String message = "";
try {
message = messageCallable.call();
} catch (Exception ignore) {
}
invocation.put(Profiler.PROFILER_KEY, Profiler.enter((ProfilerEntry) fromInvocation, message));
}
}
public static void releaseProfiler(Invocation invocation) {
Object fromInvocation = invocation.get(Profiler.PROFILER_KEY);
if (fromInvocation instanceof ProfilerEntry) {
invocation.put(Profiler.PROFILER_KEY, Profiler.release((ProfilerEntry) fromInvocation));
}
}
}
| InvocationProfilerUtils |
java | jhy__jsoup | src/main/java/org/jsoup/nodes/Printer.java | {
"start": 7574,
"end": 8584
} | class ____ extends Pretty {
Outline(Node root, QuietAppendable accum, OutputSettings settings) {
super(root, accum, settings);
}
@Override
boolean isBlockEl(@Nullable Node node) {
return node != null;
}
@Override
boolean shouldIndent(@Nullable Node node) {
if (node == null || node == root || preserveWhitespace || isBlankText(node))
return false;
if (node instanceof TextNode) {
return node.previousSibling() != null || node.nextSibling() != null;
}
return true;
}
}
static Printer printerFor(Node root, QuietAppendable accum) {
OutputSettings settings = NodeUtils.outputSettings(root);
if (settings.outline()) return new Printer.Outline(root, accum, settings);
if (settings.prettyPrint()) return new Printer.Pretty(root, accum, settings);
return new Printer(root, accum, settings);
}
}
| Outline |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/associations/any/PropertyRepository.java | {
"start": 850,
"end": 1744
} | class ____ {
@Id
private Long id;
@ManyToAny
@AnyDiscriminator(DiscriminatorType.STRING)
@Column(name = "property_type")
@AnyKeyJavaClass(Long.class)
@AnyDiscriminatorValue(discriminator = "S", entity = StringProperty.class)
@AnyDiscriminatorValue(discriminator = "I", entity = IntegerProperty.class)
@Cascade(ALL)
@JoinTable(name = "repository_properties",
joinColumns = @JoinColumn(name = "repository_id"),
inverseJoinColumns = @JoinColumn(name = "property_id")
)
private List<Property<?>> properties = new ArrayList<>();
//Getters and setters are omitted for brevity
//end::associations-many-to-any-example[]
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public List<Property<?>> getProperties() {
return properties;
}
//tag::associations-many-to-any-example[]
}
//end::associations-many-to-any-example[]
| PropertyRepository |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/math/Bellard.java | {
"start": 3683,
"end": 6964
} | class ____ implements Container<Summation>, Iterable<Summation> {
private static final long ACCURACY_BIT = 50;
private final Parameter parameter;
private final Summation sigma;
private final Summation[] parts;
private final Tail tail;
/** Constructor */
private <T extends Container<Summation>> Sum(long b, Parameter p, int nParts, List<T> existing) {
if (b < 0)
throw new IllegalArgumentException("b = " + b + " < 0");
if (nParts < 1)
throw new IllegalArgumentException("nParts = " + nParts + " < 1");
final long i = p.j == 1 && p.offsetE >= 0? 1 : 0;
final long e = b + i*p.deltaE + p.offsetE;
final long n = i*p.deltaN + p.j;
this.parameter = p;
this.sigma = new Summation(n, p.deltaN, e, p.deltaE, 0);
this.parts = partition(sigma, nParts, existing);
this.tail = new Tail(n, e);
}
private static <T extends Container<Summation>> Summation[] partition(
Summation sigma, int nParts, List<T> existing) {
final List<Summation> parts = new ArrayList<Summation>();
if (existing == null || existing.isEmpty())
parts.addAll(Arrays.asList(sigma.partition(nParts)));
else {
final long stepsPerPart = sigma.getSteps()/nParts;
final List<Summation> remaining = sigma.remainingTerms(existing);
for(Summation s : remaining) {
final int n = (int)((s.getSteps() - 1)/stepsPerPart) + 1;
parts.addAll(Arrays.asList(s.partition(n)));
}
for(Container<Summation> c : existing)
parts.add(c.getElement());
Collections.sort(parts);
}
return parts.toArray(new Summation[parts.size()]);
}
/** {@inheritDoc} */
@Override
public String toString() {
int n = 0;
for(Summation s : parts)
if (s.getValue() == null)
n++;
return getClass().getSimpleName() + "{" + parameter + ": " + sigma
+ ", remaining=" + n + "}";
}
/** Set the value of sigma */
public void setValue(Summation s) {
if (s.getValue() == null)
throw new IllegalArgumentException("s.getValue()"
+ "\n sigma=" + sigma
+ "\n s =" + s);
if (!s.contains(sigma) || !sigma.contains(s))
throw new IllegalArgumentException("!s.contains(sigma) || !sigma.contains(s)"
+ "\n sigma=" + sigma
+ "\n s =" + s);
sigma.setValue(s.getValue());
}
/** get the value of sigma */
public double getValue() {
if (sigma.getValue() == null) {
double d = 0;
for(int i = 0; i < parts.length; i++)
d = Modular.addMod(d, parts[i].compute());
sigma.setValue(d);
}
final double s = Modular.addMod(sigma.getValue(), tail.compute());
return parameter.isplus? s: -s;
}
/** {@inheritDoc} */
@Override
public Summation getElement() {
if (sigma.getValue() == null) {
int i = 0;
double d = 0;
for(; i < parts.length && parts[i].getValue() != null; i++)
d = Modular.addMod(d, parts[i].getValue());
if (i == parts.length)
sigma.setValue(d);
}
return sigma;
}
/** The sum tail */
private | Sum |
java | elastic__elasticsearch | x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/logging/LoggingActionTests.java | {
"start": 1893,
"end": 8448
} | class ____ extends ESTestCase {
private Logger actionLogger;
private LoggingLevel level;
private TextTemplateEngine engine;
@Before
public void init() throws IOException {
actionLogger = mock(Logger.class);
level = randomFrom(LoggingLevel.values());
engine = mock(TextTemplateEngine.class);
}
public void testExecute() throws Exception {
final ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
WatchExecutionContext ctx = WatcherTestUtils.mockExecutionContextBuilder("_watch_id").time("_watch_id", now).buildMock();
Map<String, Object> triggerModel = new HashMap<>();
triggerModel.put("scheduled_time", now);
triggerModel.put("triggered_time", now);
Map<String, Object> ctxModel = new HashMap<>();
ctxModel.put("id", ctx.id().value());
ctxModel.put("watch_id", "_watch_id");
ctxModel.put("execution_time", now);
ctxModel.put("payload", emptyMap());
ctxModel.put("metadata", emptyMap());
ctxModel.put("vars", emptyMap());
ctxModel.put("trigger", triggerModel);
Map<String, Object> expectedModel = singletonMap("ctx", ctxModel);
String text = randomAlphaOfLength(10);
TextTemplate template = new TextTemplate(text);
LoggingAction action = new LoggingAction(template, level, "_category");
ExecutableLoggingAction executable = new ExecutableLoggingAction(action, logger, actionLogger, engine);
when(engine.render(template, expectedModel)).thenReturn(text);
Action.Result result = executable.execute("_id", ctx, new Payload.Simple());
verifyLogger(actionLogger, level, text);
assertThat(result, notNullValue());
assertThat(result.status(), is(Action.Result.Status.SUCCESS));
assertThat(result, instanceOf(LoggingAction.Result.Success.class));
assertThat(((LoggingAction.Result.Success) result).loggedText(), is(text));
}
public void testParser() throws Exception {
LoggingActionFactory parser = new LoggingActionFactory(engine);
String text = randomAlphaOfLength(10);
TextTemplate template = new TextTemplate(text);
XContentBuilder builder = jsonBuilder().startObject();
builder.field("text", template);
String category = null;
if (randomBoolean()) {
category = randomAlphaOfLength(10);
builder.field("category", category);
}
LoggingLevel level = null;
if (randomBoolean()) {
level = randomFrom(LoggingLevel.values());
builder.field("level", level);
}
builder.endObject();
XContentParser xContentParser = createParser(builder);
xContentParser.nextToken();
ExecutableLoggingAction executable = parser.parseExecutable(randomAlphaOfLength(5), randomAlphaOfLength(3), xContentParser);
assertThat(executable, notNullValue());
assertThat(executable.action().category, is(category));
assertThat(executable.action().level, level == null ? is(LoggingLevel.INFO) : is(level));
assertThat(executable.textLogger(), notNullValue());
assertThat(executable.action().text, notNullValue());
assertThat(executable.action().text, is(template));
}
public void testParserSelfGenerated() throws Exception {
LoggingActionFactory parser = new LoggingActionFactory(engine);
String text = randomAlphaOfLength(10);
TextTemplate template = new TextTemplate(text);
String category = randomAlphaOfLength(10);
LoggingAction action = new LoggingAction(template, level, category);
ExecutableLoggingAction executable = new ExecutableLoggingAction(action, logger, engine);
XContentBuilder builder = jsonBuilder();
executable.toXContent(builder, Attachment.XContent.EMPTY_PARAMS);
XContentParser xContentParser = createParser(builder);
xContentParser.nextToken();
ExecutableLoggingAction parsedAction = parser.parseExecutable(randomAlphaOfLength(5), randomAlphaOfLength(5), xContentParser);
assertThat(parsedAction, equalTo(executable));
}
public void testParserBuilder() throws Exception {
LoggingActionFactory parser = new LoggingActionFactory(engine);
String text = randomAlphaOfLength(10);
TextTemplate template = new TextTemplate(text);
LoggingAction.Builder actionBuilder = loggingAction(template);
if (randomBoolean()) {
actionBuilder.setCategory(randomAlphaOfLength(10));
}
if (randomBoolean()) {
actionBuilder.setLevel(randomFrom(LoggingLevel.values()));
}
LoggingAction action = actionBuilder.build();
XContentBuilder builder = jsonBuilder().value(action);
XContentParser xContentParser = createParser(builder);
assertThat(xContentParser.nextToken(), is(XContentParser.Token.START_OBJECT));
ExecutableLoggingAction executable = parser.parseExecutable(randomAlphaOfLength(4), randomAlphaOfLength(5), xContentParser);
assertThat(executable, notNullValue());
assertThat(executable.action(), is(action));
assertThat(executable.action(), is(action));
assertThat(executable.action(), is(action));
}
public void testParserFailure() throws Exception {
LoggingActionFactory parser = new LoggingActionFactory(engine);
XContentBuilder builder = jsonBuilder().startObject().endObject();
XContentParser xContentParser = createParser(builder);
xContentParser.nextToken();
try {
parser.parseExecutable(randomAlphaOfLength(5), randomAlphaOfLength(5), xContentParser);
fail("Expected failure as there's no text");
} catch (ElasticsearchParseException e) {
assertThat(e.getMessage(), containsString("missing required [text] field"));
}
}
@SuppressLoggerChecks(reason = "mock usage")
static void verifyLogger(Logger logger, LoggingLevel level, String text) {
switch (level) {
case ERROR -> verify(logger, times(1)).error(text);
case WARN -> verify(logger, times(1)).warn(text);
case INFO -> verify(logger, times(1)).info(text);
case DEBUG -> verify(logger, times(1)).debug(text);
case TRACE -> verify(logger, times(1)).trace(text);
default -> fail("unhandled logging level [" + level.name() + "]");
}
}
}
| LoggingActionTests |
java | quarkusio__quarkus | integration-tests/hibernate-orm-data/src/main/java/io/quarkus/it/hibernate/processor/data/puother/MyOtherRepository.java | {
"start": 385,
"end": 700
} | interface ____ extends CrudRepository<MyOtherEntity, Integer> {
@Find
Stream<MyOtherEntity> findAll(Order<MyOtherEntity> order);
@Query("select e from MyOtherEntity e where e.name like :name")
List<MyOtherEntity> findByName(String name);
@Delete
void delete(String name);
}
| MyOtherRepository |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/context/ConfigurableWebApplicationContext.java | {
"start": 1071,
"end": 1522
} | interface ____ to be called before an
* invocation of the {@link #refresh} method inherited from
* {@link org.springframework.context.ConfigurableApplicationContext}.
* They do not cause an initialization of the context on their own.
*
* @author Juergen Hoeller
* @since 05.12.2003
* @see #refresh
* @see ContextLoader#createWebApplicationContext
* @see org.springframework.web.servlet.FrameworkServlet#createWebApplicationContext
*/
public | need |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/entrypoint/component/AbstractUserClassPathJobGraphRetrieverTest.java | {
"start": 1729,
"end": 3348
} | class ____ extends AbstractUserClassPathJobGraphRetriever {
TestJobGraphRetriever(File jobDir) throws IOException {
super(jobDir);
}
@Override
public JobGraph retrieveJobGraph(Configuration configuration) {
throw new UnsupportedOperationException("This method should not be called.");
}
}
@Test
public void testGetUserClassPath() throws IOException {
final File testJobDir = temporaryFolder.newFolder("_test_job");
final Collection<Path> testFiles = FileUtilsTest.prepareTestFiles(testJobDir.toPath());
final Path currentWorkingDirectory = FileUtils.getCurrentWorkingDirectory();
final TestJobGraphRetriever testJobGraphRetriever = new TestJobGraphRetriever(testJobDir);
assertThat(
testJobGraphRetriever.getUserClassPaths(),
containsInAnyOrder(
testFiles.stream()
.map(
file ->
FileUtils.relativizePath(
currentWorkingDirectory, file))
.map(FunctionUtils.uncheckedFunction(FileUtils::toURL))
.toArray()));
}
@Test
public void testGetUserClassPathReturnEmptyListIfJobDirIsNull() throws IOException {
final TestJobGraphRetriever testJobGraphRetriever = new TestJobGraphRetriever(null);
assertTrue(testJobGraphRetriever.getUserClassPaths().isEmpty());
}
}
| TestJobGraphRetriever |
java | spring-projects__spring-framework | spring-tx/src/test/java/org/springframework/transaction/annotation/EnableTransactionManagementTests.java | {
"start": 17780,
"end": 17889
} | class ____ extends TransactionalTestBean {
}
@Configuration
static | TransactionalTestBeanWithInvalidQualifier |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/CanonicalTests.java | {
"start": 9426,
"end": 10385
} | interface ____ {
BinaryOperator<?, ?, ?, ?> create(Source source, Expression left, Expression right);
}
public void testBasicOperators() throws Exception {
List<BinaryOperatorFactory> list = Arrays.asList(
// arithmetic
Add::new,
Mul::new,
// logical
Or::new,
And::new
);
for (BinaryOperatorFactory factory : list) {
Literal left = of(randomInt());
Literal right = of(randomInt());
BinaryOperator<?, ?, ?, ?> first = factory.create(Source.EMPTY, left, right);
BinaryOperator<?, ?, ?, ?> second = factory.create(Source.EMPTY, right, left);
assertNotEquals(first, second);
assertTrue(first.semanticEquals(second));
assertEquals(first, second.swapLeftAndRight());
assertEquals(second, first.swapLeftAndRight());
}
}
| BinaryOperatorFactory |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-router/src/test/java/org/apache/hadoop/yarn/server/router/clientrm/TestFederationClientInterceptorRetry.java | {
"start": 3989,
"end": 19124
} | class ____
extends BaseRouterClientRMTest {
private static final Logger LOG =
LoggerFactory.getLogger(TestFederationClientInterceptorRetry.class);
public static Collection<String[]> getParameters() {
return Arrays.asList(new String[][] {{UniformBroadcastPolicyManager.class.getName()},
{TestSequentialBroadcastPolicyManager.class.getName()}});
}
private TestableFederationClientInterceptor interceptor;
private MemoryFederationStateStore stateStore;
private FederationStateStoreTestUtil stateStoreUtil;
private String routerPolicyManagerName;
private String user = "test-user";
// running and registered
private static SubClusterId good;
// registered but not running
private static SubClusterId bad1;
private static SubClusterId bad2;
private static List<SubClusterId> scs = new ArrayList<>();
private void initTestFederationClientInterceptorRetry(String policyManagerName)
throws IOException {
this.routerPolicyManagerName = policyManagerName;
setUp();
}
@Override
public void setUp() throws IOException {
super.setUpConfig();
interceptor = new TestableFederationClientInterceptor();
stateStore = new MemoryFederationStateStore();
stateStore.init(this.getConf());
FederationStateStoreFacade.getInstance(getConf()).reinitialize(stateStore,
getConf());
stateStoreUtil = new FederationStateStoreTestUtil(stateStore);
interceptor.setConf(this.getConf());
interceptor.init(user);
// Create SubClusters
good = SubClusterId.newInstance("0");
bad1 = SubClusterId.newInstance("1");
bad2 = SubClusterId.newInstance("2");
scs.add(good);
scs.add(bad1);
scs.add(bad2);
// The mock RM will not start in these SubClusters, this is done to simulate
// a SubCluster down
interceptor.registerBadSubCluster(bad1);
interceptor.registerBadSubCluster(bad2);
}
@AfterEach
@Override
public void tearDown() {
interceptor.shutdown();
super.tearDown();
}
private void setupCluster(List<SubClusterId> scsToRegister) throws YarnException {
try {
// Clean up the StateStore before every test
stateStoreUtil.deregisterAllSubClusters();
for (SubClusterId sc : scsToRegister) {
stateStoreUtil.registerSubCluster(sc);
}
} catch (YarnException e) {
LOG.error(e.getMessage());
fail();
}
}
@Override
protected YarnConfiguration createConfiguration() {
YarnConfiguration conf = new YarnConfiguration();
conf.setBoolean(YarnConfiguration.FEDERATION_ENABLED, true);
String mockPassThroughInterceptorClass =
PassThroughClientRequestInterceptor.class.getName();
// Create a request interceptor pipeline for testing. The last one in the
// chain is the federation interceptor that calls the mock resource manager.
// The others in the chain will simply forward it to the next one in the
// chain
conf.set(YarnConfiguration.ROUTER_CLIENTRM_INTERCEPTOR_CLASS_PIPELINE,
mockPassThroughInterceptorClass + "," + mockPassThroughInterceptorClass
+ "," + TestableFederationClientInterceptor.class.getName());
conf.set(FEDERATION_POLICY_MANAGER, this.routerPolicyManagerName);
// Disable StateStoreFacade cache
conf.setInt(YarnConfiguration.FEDERATION_CACHE_TIME_TO_LIVE_SECS, 0);
return conf;
}
/**
* This test validates the correctness of GetNewApplication in case the
* cluster is composed of only 1 bad SubCluster.
*/
@ParameterizedTest
@MethodSource("getParameters")
public void testGetNewApplicationOneBadSC(String policyManagerName) throws Exception {
initTestFederationClientInterceptorRetry(policyManagerName);
LOG.info("Test getNewApplication with one bad SubCluster");
setupCluster(Arrays.asList(bad2));
GetNewApplicationRequest request = GetNewApplicationRequest.newInstance();
LambdaTestUtils.intercept(YarnException.class, NO_ACTIVE_SUBCLUSTER_AVAILABLE,
() -> interceptor.getNewApplication(request));
}
/**
* This test validates the correctness of GetNewApplication in case the
* cluster is composed of only 2 bad SubClusters.
*/
@ParameterizedTest
@MethodSource("getParameters")
public void testGetNewApplicationTwoBadSCs(String policyManagerName) throws Exception {
initTestFederationClientInterceptorRetry(policyManagerName);
LOG.info("Test getNewApplication with two bad SubClusters");
setupCluster(Arrays.asList(bad1, bad2));
GetNewApplicationRequest request = GetNewApplicationRequest.newInstance();
LambdaTestUtils.intercept(YarnException.class, NO_ACTIVE_SUBCLUSTER_AVAILABLE,
() -> interceptor.getNewApplication(request));
}
/**
* This test validates the correctness of GetNewApplication in case the
* cluster is composed of only 1 bad SubCluster and 1 good one.
*/
@ParameterizedTest
@MethodSource("getParameters")
public void testGetNewApplicationOneBadOneGood(String policyManagerName)
throws YarnException, IOException {
initTestFederationClientInterceptorRetry(policyManagerName);
LOG.info("Test getNewApplication with one bad, one good SC");
setupCluster(Arrays.asList(good, bad2));
GetNewApplicationRequest request = GetNewApplicationRequest.newInstance();
GetNewApplicationResponse response = interceptor.getNewApplication(request);
assertNotNull(response);
assertEquals(ResourceManager.getClusterTimeStamp(),
response.getApplicationId().getClusterTimestamp());
}
/**
* This test validates the correctness of SubmitApplication in case the
* cluster is composed of only 1 bad SubCluster.
*/
@ParameterizedTest
@MethodSource("getParameters")
public void testSubmitApplicationOneBadSC(String policyManagerName) throws Exception {
initTestFederationClientInterceptorRetry(policyManagerName);
LOG.info("Test submitApplication with one bad SubCluster");
setupCluster(Arrays.asList(bad2));
final ApplicationId appId =
ApplicationId.newInstance(System.currentTimeMillis(), 1);
final SubmitApplicationRequest request = mockSubmitApplicationRequest(appId);
LambdaTestUtils.intercept(YarnException.class, NO_ACTIVE_SUBCLUSTER_AVAILABLE,
() -> interceptor.submitApplication(request));
}
private SubmitApplicationRequest mockSubmitApplicationRequest(ApplicationId appId) {
ContainerLaunchContext amContainerSpec = mock(ContainerLaunchContext.class);
ApplicationSubmissionContext context = ApplicationSubmissionContext
.newInstance(appId, MockApps.newAppName(), "q1",
Priority.newInstance(0), amContainerSpec, false, false, -1,
Resources.createResource(YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB),
"MockApp");
SubmitApplicationRequest request = SubmitApplicationRequest.newInstance(context);
return request;
}
/**
* This test validates the correctness of SubmitApplication in case the
* cluster is composed of only 2 bad SubClusters.
*/
@ParameterizedTest
@MethodSource("getParameters")
public void testSubmitApplicationTwoBadSCs(String policyManagerName) throws Exception {
initTestFederationClientInterceptorRetry(policyManagerName);
LOG.info("Test submitApplication with two bad SubClusters.");
setupCluster(Arrays.asList(bad1, bad2));
final ApplicationId appId =
ApplicationId.newInstance(System.currentTimeMillis(), 1);
final SubmitApplicationRequest request = mockSubmitApplicationRequest(appId);
LambdaTestUtils.intercept(YarnException.class, NO_ACTIVE_SUBCLUSTER_AVAILABLE,
() -> interceptor.submitApplication(request));
}
/**
* This test validates the correctness of SubmitApplication in case the
* cluster is composed of only 1 bad SubCluster and a good one.
*/
@ParameterizedTest
@MethodSource("getParameters")
public void testSubmitApplicationOneBadOneGood(String policyManagerName)
throws YarnException, IOException, InterruptedException {
initTestFederationClientInterceptorRetry(policyManagerName);
LOG.info("Test submitApplication with one bad, one good SC.");
setupCluster(Arrays.asList(good, bad2));
final ApplicationId appId =
ApplicationId.newInstance(System.currentTimeMillis(), 1);
final SubmitApplicationRequest request = mockSubmitApplicationRequest(appId);
SubmitApplicationResponse response = interceptor.submitApplication(request);
assertNotNull(response);
GetApplicationHomeSubClusterRequest getAppRequest =
GetApplicationHomeSubClusterRequest.newInstance(appId);
GetApplicationHomeSubClusterResponse getAppResponse =
stateStore.getApplicationHomeSubCluster(getAppRequest);
assertNotNull(getAppResponse);
ApplicationHomeSubCluster responseHomeSubCluster =
getAppResponse.getApplicationHomeSubCluster();
assertNotNull(responseHomeSubCluster);
SubClusterId respSubClusterId = responseHomeSubCluster.getHomeSubCluster();
assertEquals(good, respSubClusterId);
}
@ParameterizedTest
@MethodSource("getParameters")
public void testSubmitApplicationTwoBadOneGood(String policyManagerName) throws Exception {
initTestFederationClientInterceptorRetry(policyManagerName);
assumeTrue(policyManagerName.equals(TestSequentialBroadcastPolicyManager.class.getName()));
LOG.info("Test submitApplication with two bad, one good SC.");
// This test must require the TestSequentialRouterPolicy policy
assertThat(routerPolicyManagerName).
isEqualTo(TestSequentialBroadcastPolicyManager.class.getName());
setupCluster(Arrays.asList(bad1, bad2, good));
final ApplicationId appId =
ApplicationId.newInstance(System.currentTimeMillis(), 1);
// Use the TestSequentialRouterPolicy strategy,
// which will sort the SubClusterId because good=0, bad1=1, bad2=2
// We will get 2, 1, 0 [bad2, bad1, good]
// Set the retryNum to 1
// 1st time will use bad2, 2nd time will use bad1
// bad1 is updated to stateStore
interceptor.setNumSubmitRetries(1);
final SubmitApplicationRequest request = mockSubmitApplicationRequest(appId);
LambdaTestUtils.intercept(YarnException.class, "RM is stopped",
() -> interceptor.submitApplication(request));
// We will get bad1
checkSubmitSubCluster(appId, bad1);
// Set the retryNum to 2
// 1st time will use bad2, 2nd time will use bad1, 3rd good
interceptor.setNumSubmitRetries(2);
SubmitApplicationResponse submitAppResponse = interceptor.submitApplication(request);
assertNotNull(submitAppResponse);
// We will get good
checkSubmitSubCluster(appId, good);
}
private void checkSubmitSubCluster(ApplicationId appId, SubClusterId expectSubCluster)
throws YarnException {
GetApplicationHomeSubClusterRequest getAppRequest =
GetApplicationHomeSubClusterRequest.newInstance(appId);
GetApplicationHomeSubClusterResponse getAppResponse =
stateStore.getApplicationHomeSubCluster(getAppRequest);
assertNotNull(getAppResponse);
assertNotNull(getAppResponse);
ApplicationHomeSubCluster responseHomeSubCluster =
getAppResponse.getApplicationHomeSubCluster();
assertNotNull(responseHomeSubCluster);
SubClusterId respSubClusterId = responseHomeSubCluster.getHomeSubCluster();
assertEquals(expectSubCluster, respSubClusterId);
}
@ParameterizedTest
@MethodSource("getParameters")
public void testSubmitApplicationTwoBadNodeWithRealError(String policyManagerName)
throws Exception {
initTestFederationClientInterceptorRetry(policyManagerName);
LOG.info("Test submitApplication with two bad SubClusters.");
setupCluster(Arrays.asList(bad1, bad2));
interceptor.setNumSubmitRetries(1);
final ApplicationId appId =
ApplicationId.newInstance(System.currentTimeMillis(), 5);
final SubmitApplicationRequest request = mockSubmitApplicationRequest(appId);
LambdaTestUtils.intercept(YarnException.class, "RM is stopped",
() -> interceptor.submitApplication(request));
}
@ParameterizedTest
@MethodSource("getParameters")
public void testSubmitApplicationOneBadNodeWithRealError(String policyManagerName)
throws Exception {
initTestFederationClientInterceptorRetry(policyManagerName);
LOG.info("Test submitApplication with one bad SubClusters.");
setupCluster(Arrays.asList(bad1));
interceptor.setNumSubmitRetries(0);
final ApplicationId appId =
ApplicationId.newInstance(System.currentTimeMillis(), 6);
final SubmitApplicationRequest request = mockSubmitApplicationRequest(appId);
LambdaTestUtils.intercept(YarnException.class, "RM is stopped",
() -> interceptor.submitApplication(request));
}
@ParameterizedTest
@MethodSource("getParameters")
public void testGetClusterMetricsTwoBadNodeWithRealError(String policyManagerName)
throws Exception {
initTestFederationClientInterceptorRetry(policyManagerName);
LOG.info("Test getClusterMetrics with two bad SubClusters.");
setupCluster(Arrays.asList(bad1, bad2));
GetClusterMetricsRequest request = GetClusterMetricsRequest.newInstance();
LambdaTestUtils.intercept(YarnException.class,
"subClusterId 1 exec getClusterMetrics error RM is stopped.",
() -> interceptor.getClusterMetrics(request));
LambdaTestUtils.intercept(YarnException.class,
"subClusterId 2 exec getClusterMetrics error RM is stopped.",
() -> interceptor.getClusterMetrics(request));
}
@ParameterizedTest
@MethodSource("getParameters")
public void testGetClusterMetricsOneBadNodeWithRealError(String policyManagerName)
throws Exception {
initTestFederationClientInterceptorRetry(policyManagerName);
LOG.info("Test getClusterMetrics with one bad SubClusters.");
setupCluster(Arrays.asList(bad1));
GetClusterMetricsRequest request = GetClusterMetricsRequest.newInstance();
LambdaTestUtils.intercept(YarnException.class,
"subClusterId 1 exec getClusterMetrics error RM is stopped.",
() -> interceptor.getClusterMetrics(request));
}
@ParameterizedTest
@MethodSource("getParameters")
public void testGetClusterMetricsOneBadOneGoodNodeWithRealError(
String policyManagerName) throws Exception {
initTestFederationClientInterceptorRetry(policyManagerName);
LOG.info("Test getClusterMetrics with one bad and one good SubCluster.");
setupCluster(Arrays.asList(bad1, good));
GetClusterMetricsRequest request = GetClusterMetricsRequest.newInstance();
GetClusterMetricsResponse clusterMetrics = interceptor.getClusterMetrics(request);
assertNotNull(clusterMetrics);
// If partial results are not allowed to be returned, an exception will be thrown.
interceptor.setAllowPartialResult(false);
LambdaTestUtils.intercept(YarnException.class,
"subClusterId 1 exec getClusterMetrics error RM is stopped.",
() -> interceptor.getClusterMetrics(request));
interceptor.setAllowPartialResult(true);
}
}
| TestFederationClientInterceptorRetry |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/availability/LivenessState.java | {
"start": 1038,
"end": 1262
} | enum ____ implements AvailabilityState {
/**
* The application is running and its internal state is correct.
*/
CORRECT,
/**
* The application is running but its internal state is broken.
*/
BROKEN
}
| LivenessState |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/insertordering/UpdateOrderingIdentityIdentifierTest.java | {
"start": 1533,
"end": 2911
} | class ____ {
@Test
public void testFailWithDelayedPostInsertIdentifier(EntityManagerFactoryScope scope) {
final Long zooId = scope.fromTransaction( entityManager -> {
final Zoo zoo = new Zoo();
entityManager.persist( zoo );
return zoo.getId();
} );
scope.inEntityManager( entityManager -> {
entityManager.setFlushMode( FlushModeType.COMMIT );
Session session = entityManager.unwrap( Session.class );
session.setHibernateFlushMode( FlushMode.MANUAL );
try {
entityManager.getTransaction().begin();
final Zoo zooExisting = entityManager.find( Zoo.class, zooId );
Zoo zoo = new Zoo();
entityManager.persist( zoo );
entityManager.flush();
Animal animal1 = new Animal();
animal1.setZoo( zoo );
zooExisting.getAnimals().add( animal1 );
Animal animal2 = new Animal();
animal2.setZoo( zoo );
zoo.getAnimals().add( animal2 );
// When allowing delayed identity inserts, this flush would result in a failure due to
// CollectionAction#compareTo using a DelayedPostInsertIdentifier object.
entityManager.flush();
entityManager.getTransaction().commit();
}
catch (Exception e) {
if ( entityManager.getTransaction().isActive() ) {
entityManager.getTransaction().rollback();
}
throw e;
}
} );
}
@Entity(name = "Zoo")
public static | UpdateOrderingIdentityIdentifierTest |
java | spring-projects__spring-security | core/src/test/java/org/springframework/security/aot/hint/PrePostAuthorizeHintsRegistrarTests.java | {
"start": 9474,
"end": 9556
} | class ____ {
@AuthorizeReturnObject
B getB() {
return null;
}
}
static | A |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/component/instance/ComponentInstance.java | {
"start": 26697,
"end": 36662
} | class ____ implements
MultipleArcTransition<ComponentInstance, ComponentInstanceEvent,
ComponentInstanceState> {
@Override
public ComponentInstanceState transition(ComponentInstance instance,
ComponentInstanceEvent event) {
if (instance.upgradeInProgress.compareAndSet(false, true)) {
Component.UpgradeStatus cancelStatus = instance.component
.getCancelUpgradeStatus();
if (instance.getServiceVersion().equals(
cancelStatus.getTargetVersion())) {
// previous upgrade didn't happen so just go back to READY
LOG.info("{} nothing to cancel", event.getContainerId());
cancelStatus.decContainersThatNeedUpgrade();
instance.setContainerState(ContainerState.READY);
ComponentEvent checkState = new ComponentEvent(
instance.component.getName(), ComponentEventType.CHECK_STABLE);
instance.scheduler.getDispatcher().getEventHandler()
.handle(checkState);
return ComponentInstanceState.READY;
} else {
instance.component.decContainersReady(false);
instance.cancelUpgrade();
}
} else {
LOG.info("{} pending cancellation", event.getContainerId());
instance.pendingCancelUpgrade = true;
}
return ComponentInstanceState.CANCEL_UPGRADING;
}
}
private void cancelUpgrade() {
LOG.info("{} cancelling upgrade", container.getId());
setContainerState(ContainerState.UPGRADING);
Component.UpgradeStatus cancelStatus = component.getCancelUpgradeStatus();
reInitHelper(cancelStatus);
}
private void reInitHelper(Component.UpgradeStatus upgradeStatus) {
cancelContainerStatusRetriever();
cancelLclRetriever();
setContainerStatus(container.getId(), null);
scheduler.executorService.submit(() -> cleanupRegistry(container.getId()));
Future<ProviderService.ResolvedLaunchParams> launchParamsFuture =
scheduler.getContainerLaunchService()
.reInitCompInstance(scheduler.getApp(), this,
this.container, this.component.createLaunchContext(
upgradeStatus.getTargetSpec(),
upgradeStatus.getTargetVersion()));
updateResolvedLaunchParams(launchParamsFuture);
}
private void initializeStatusRetriever(ComponentInstanceEvent event,
long initialDelay) {
boolean cancelOnSuccess = true;
if (getCompSpec().getArtifact() != null &&
getCompSpec().getArtifact().getType() == Artifact.TypeEnum.DOCKER) {
// A docker container might get a different IP if the container is
// relaunched by the NM, so we need to keep checking the status.
// This is a temporary fix until the NM provides a callback for
// container relaunch (see YARN-8265).
cancelOnSuccess = false;
}
LOG.info("{} retrieve status after {}", compInstanceId, initialDelay);
containerStatusFuture =
scheduler.executorService.scheduleAtFixedRate(
new ContainerStatusRetriever(scheduler, event.getContainerId(),
this, cancelOnSuccess), initialDelay, 1,
TimeUnit.SECONDS);
}
public ComponentInstanceState getState() {
this.readLock.lock();
try {
return this.stateMachine.getCurrentState();
} finally {
this.readLock.unlock();
}
}
/**
* Returns the version of service at which the instance is at.
*/
public String getServiceVersion() {
this.readLock.lock();
try {
return this.serviceVersion;
} finally {
this.readLock.unlock();
}
}
/**
* Returns the state of the container in the container spec.
*/
public ContainerState getContainerState() {
this.readLock.lock();
try {
return this.containerSpec.getState();
} finally {
this.readLock.unlock();
}
}
/**
* Sets the state of the container in the container spec. It is write
* protected.
*
* @param state container state
*/
public void setContainerState(ContainerState state) {
this.writeLock.lock();
try {
ContainerState curState = containerSpec.getState();
if (!curState.equals(state)) {
containerSpec.setState(state);
LOG.info("{} spec state state changed from {} -> {}",
getCompInstanceId(), curState, state);
}
} finally {
this.writeLock.unlock();
}
}
@Override
public void handle(ComponentInstanceEvent event) {
writeLock.lock();
try {
ComponentInstanceState oldState = getState();
try {
stateMachine.doTransition(event.getType(), event);
} catch (InvalidStateTransitionException e) {
LOG.error(getCompInstanceId() + ": Invalid event " + event.getType() +
" at " + oldState, e);
}
if (oldState != getState()) {
LOG.info(getCompInstanceId() + " Transitioned from " + oldState + " to "
+ getState() + " on " + event.getType() + " event");
}
} finally {
writeLock.unlock();
}
}
public void setContainer(Container container) {
this.container = container;
this.compInstanceId.setContainerId(container.getId());
}
public String getCompInstanceName() {
return compInstanceId.getCompInstanceName();
}
@VisibleForTesting
void updateLocalizationStatuses(
List<org.apache.hadoop.yarn.api.records.LocalizationStatus> statuses) {
Map<String, String> resourcesCpy = new HashMap<>();
readLock.lock();
try {
if (resolvedParams == null || resolvedParams.didLaunchFail() ||
resolvedParams.getResolvedRsrcPaths() == null ||
resolvedParams.getResolvedRsrcPaths().isEmpty()) {
cancelLclRetriever();
return;
}
resourcesCpy.putAll(resolvedParams.getResolvedRsrcPaths());
} finally {
readLock.unlock();
}
boolean allCompleted = true;
Map<String, LocalizationStatus> fromNM = new HashMap<>();
statuses.forEach(statusFromNM -> {
LocalizationStatus lstatus = new LocalizationStatus()
.destFile(statusFromNM.getResourceKey())
.diagnostics(statusFromNM.getDiagnostics())
.state(statusFromNM.getLocalizationState());
fromNM.put(statusFromNM.getResourceKey(), lstatus);
});
for (String resourceKey : resourcesCpy.keySet()) {
LocalizationStatus lstatus = fromNM.get(resourceKey);
if (lstatus == null ||
lstatus.getState().equals(LocalizationState.PENDING)) {
allCompleted = false;
break;
}
}
List<LocalizationStatus> statusList = new ArrayList<>();
statusList.addAll(fromNM.values());
this.containerSpec.setLocalizationStatuses(statusList);
if (allCompleted) {
cancelLclRetriever();
}
}
public void updateResolvedLaunchParams(
Future<ProviderService.ResolvedLaunchParams> future) {
writeLock.lock();
try {
this.resolvedParams = future.get();
} catch (InterruptedException | ExecutionException e) {
LOG.error("{} updating resolved params", getCompInstanceId(), e);
} finally {
writeLock.unlock();
}
}
public ContainerStatus getContainerStatus() {
readLock.lock();
try {
return status;
} finally {
readLock.unlock();
}
}
private void setContainerStatus(ContainerId containerId,
ContainerStatus latestStatus) {
writeLock.lock();
try {
this.status = latestStatus;
org.apache.hadoop.yarn.service.api.records.Container containerRec =
getCompSpec().getContainer(containerId.toString());
if (containerRec != null) {
if (latestStatus != null) {
containerRec.setIp(StringUtils.join(",", latestStatus.getIPs()));
containerRec.setHostname(latestStatus.getHost());
} else {
containerRec.setIp(null);
containerRec.setHostname(null);
}
}
} finally {
writeLock.unlock();
}
}
public void updateContainerStatus(ContainerStatus status) {
org.apache.hadoop.yarn.service.api.records.Container containerRec =
getCompSpec().getContainer(status.getContainerId().toString());
boolean doRegistryUpdate = true;
if (containerRec != null) {
String existingIP = containerRec.getIp();
String newIP = StringUtils.join(",", status.getIPs());
if (existingIP != null && newIP.equals(existingIP)) {
doRegistryUpdate = false;
}
}
ObjectMapper mapper = new ObjectMapper();
try {
Map<String, List<Map<String, String>>> ports = null;
ports = mapper.readValue(status.getExposedPorts(),
new TypeReference<Map<String, List<Map<String, String>>>>(){});
container.setExposedPorts(ports);
} catch (IOException e) {
LOG.warn("Unable to process container ports mapping: {}", e);
}
setContainerStatus(status.getContainerId(), status);
if (containerRec != null && timelineServiceEnabled && doRegistryUpdate) {
serviceTimelinePublisher.componentInstanceIPHostUpdated(containerRec);
}
if (doRegistryUpdate) {
cleanupRegistry(status.getContainerId());
LOG.info(
getCompInstanceId() + " new IP = " + status.getIPs() + ", host = "
+ status.getHost() + ", updating registry");
updateServiceRecord(yarnRegistryOperations, status);
}
}
public String getCompName() {
return compInstanceId.getCompName();
}
public void setCompInstanceDir(Path dir) {
this.compInstanceDir = dir;
}
public Component getComponent() {
return component;
}
public Container getContainer() {
return container;
}
public ComponentInstanceId getCompInstanceId() {
return compInstanceId;
}
public NodeId getNodeId() {
return this.container.getNodeId();
}
private org.apache.hadoop.yarn.service.api.records.Component getCompSpec() {
return component.getComponentSpec();
}
private static | CancelUpgradeTransition |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/AnnotationIntrospector.java | {
"start": 22411,
"end": 27558
} | class ____ rules (sub-classes
* inherit inclusions of super-classes)
*<p>
* Since 2.9 this method may also be called to find "default view(s)" for
* {@link AnnotatedClass}
*
* @param config Effective mapper configuration in use
* @param a Annotated property (represented by a method, field or ctor parameter)
*
* @return Array of views (represented by classes) that the property is included in;
* if null, always included (same as returning array containing <code>Object.class</code>)
*/
public Class<?>[] findViews(MapperConfig<?> config, Annotated a) { return null; }
/**
* Method for finding format annotations for property or class.
* Return value is typically used by serializers and/or
* deserializers to customize presentation aspects of the
* serialized value.
*
* @param config Effective mapper configuration in use
*/
public JsonFormat.Value findFormat(MapperConfig<?> config, Annotated memberOrClass) {
return JsonFormat.Value.empty();
}
/**
* Method used to check if specified property has annotation that indicates
* that it should be wrapped in an element; and if so, name to use.
* Note that not all serializers and deserializers support use this method:
* currently (3.0) it is only used by XML-backed handlers.
*
* @param config Effective mapper configuration in use
*
* @return Wrapper name to use, if any, or {@link PropertyName#USE_DEFAULT}
* to indicate that no wrapper element should be used.
*/
public PropertyName findWrapperName(MapperConfig<?> config, Annotated ann) { return null; }
/**
* Method for finding suggested default value (as simple textual serialization)
* for the property. While core databind does not make any use of it, it is exposed
* for extension modules to use: an expected use is generation of schema representations
* and documentation.
*
* @param config Effective mapper configuration in use
*/
public String findPropertyDefaultValue(MapperConfig<?> config, Annotated ann) { return null; }
/**
* Method used to check whether specified property member (accessor
* or mutator) defines human-readable description to use for documentation.
* There are no further definitions for contents; for example, whether
* these may be marked up using HTML is not defined.
*
* @param config Effective mapper configuration in use
*
* @return Human-readable description, if any.
*/
public String findPropertyDescription(MapperConfig<?> config, Annotated ann) { return null; }
/**
* Method used to check whether specified property member (accessor
* or mutator) defines numeric index, and if so, what is the index value.
* Possible use cases for index values included use by underlying data format
* (some binary formats mandate use of index instead of name) and ordering
* of properties (for documentation, or during serialization).
*
* @param config Effective mapper configuration in use
*
* @return Explicitly specified index for the property, if any
*/
public Integer findPropertyIndex(MapperConfig<?> config, Annotated ann) { return null; }
/**
* Method for finding implicit name for a property that given annotated
* member (field, method, creator parameter) may represent.
* This is different from explicit, annotation-based property name, in that
* it is "weak" and does not either prove that a property exists (for example,
* if visibility is not high enough), or override explicit names.
* In practice this method is used to introspect optional names for creator
* parameters (which may or may not be available and cannot be detected
* by standard databind); or to provide alternate name mangling for
* fields, getters and/or setters.
*
* @param config Effective mapper configuration in use
*/
public String findImplicitPropertyName(MapperConfig<?> config, AnnotatedMember member) { return null; }
/**
* Method called to find if given property has alias(es) defined.
*
* @param config Effective mapper configuration in use
*
* @return `null` if member has no information; otherwise a `List` (possibly
* empty) of aliases to use.
*/
public List<PropertyName> findPropertyAliases(MapperConfig<?> config, Annotated ann) { return null; }
/**
* Method for finding optional access definition for a property, annotated
* on one of its accessors. If a definition for read-only, write-only
* or read-write cases, visibility rules may be modified. Note, however,
* that even more specific annotations (like one for ignoring specific accessor)
* may further override behavior of the access definition.
*
* @param config Effective mapper configuration in use
*/
public JsonProperty.Access findPropertyAccess(MapperConfig<?> config, Annotated ann) { return null; }
/**
* Method called in cases where a | inheritance |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/config/SslConfigs.java | {
"start": 1048,
"end": 8543
} | class ____ {
/*
* NOTE: DO NOT CHANGE EITHER CONFIG NAMES AS THESE ARE PART OF THE PUBLIC API AND CHANGE WILL BREAK USER CODE.
*/
public static final String SSL_PROTOCOL_CONFIG = "ssl.protocol";
public static final String SSL_PROTOCOL_DOC = "The SSL protocol used to generate the SSLContext. The default is 'TLSv1.3', "
+ "which should be fine for most use cases. A typical alternative to the default is 'TLSv1.2'. Allowed values for "
+ "this config are dependent on the JVM. "
+ "Clients using the defaults for this config and 'ssl.enabled.protocols' will downgrade to 'TLSv1.2' if "
+ "the server does not support 'TLSv1.3'. If this config is set to 'TLSv1.2', however, clients will not use 'TLSv1.3' even "
+ "if it is one of the values in <code>ssl.enabled.protocols</code> and the server only supports 'TLSv1.3'.";
public static final String DEFAULT_SSL_PROTOCOL = "TLSv1.3";
public static final String SSL_PROVIDER_CONFIG = "ssl.provider";
public static final String SSL_PROVIDER_DOC = "The name of the security provider used for SSL connections. Default value is the default security provider of the JVM.";
public static final String SSL_CIPHER_SUITES_CONFIG = "ssl.cipher.suites";
public static final String SSL_CIPHER_SUITES_DOC = "A list of cipher suites. This is a named combination of authentication, encryption, MAC and key exchange algorithm used to negotiate the security settings for a network connection using TLS or SSL network protocol. "
+ "By default all the available cipher suites are supported.";
public static final String SSL_ENABLED_PROTOCOLS_CONFIG = "ssl.enabled.protocols";
public static final String SSL_ENABLED_PROTOCOLS_DOC = "The list of protocols enabled for SSL connections. "
+ "The default is 'TLSv1.2,TLSv1.3'. This means that clients and servers will prefer TLSv1.3 if both support it "
+ "and fallback to TLSv1.2 otherwise (assuming both support at least TLSv1.2). This default should be fine for most use "
+ "cases. If this configuration is set to an empty list, Kafka will use the protocols enabled by default in the underlying SSLEngine, "
+ "which may include additional protocols depending on the JVM version. "
+ "Also see the config documentation for <code>ssl.protocol</code> to understand how it can impact the TLS version negotiation behavior.";
public static final String DEFAULT_SSL_ENABLED_PROTOCOLS = "TLSv1.2,TLSv1.3";
public static final String SSL_KEYSTORE_TYPE_CONFIG = "ssl.keystore.type";
public static final String SSL_KEYSTORE_TYPE_DOC = "The file format of the key store file. "
+ "This is optional for client. The values currently supported by the default <code>ssl.engine.factory.class</code> are [JKS, PKCS12, PEM].";
public static final String DEFAULT_SSL_KEYSTORE_TYPE = "JKS";
public static final String SSL_KEYSTORE_KEY_CONFIG = "ssl.keystore.key";
public static final String SSL_KEYSTORE_KEY_DOC = "Private key in the format specified by 'ssl.keystore.type'. "
+ "Default SSL engine factory supports only PEM format with PKCS#8 keys. If the key is encrypted, "
+ "key password must be specified using 'ssl.key.password'";
public static final String SSL_KEYSTORE_CERTIFICATE_CHAIN_CONFIG = "ssl.keystore.certificate.chain";
public static final String SSL_KEYSTORE_CERTIFICATE_CHAIN_DOC = "Certificate chain in the format specified by 'ssl.keystore.type'. "
+ "Default SSL engine factory supports only PEM format with a list of X.509 certificates";
public static final String SSL_TRUSTSTORE_CERTIFICATES_CONFIG = "ssl.truststore.certificates";
public static final String SSL_TRUSTSTORE_CERTIFICATES_DOC = "Trusted certificates in the format specified by 'ssl.truststore.type'. "
+ "Default SSL engine factory supports only PEM format with X.509 certificates.";
public static final String SSL_KEYSTORE_LOCATION_CONFIG = "ssl.keystore.location";
public static final String SSL_KEYSTORE_LOCATION_DOC = "The location of the key store file. "
+ "This is optional for client and can be used for two-way authentication for client.";
public static final String SSL_KEYSTORE_PASSWORD_CONFIG = "ssl.keystore.password";
public static final String SSL_KEYSTORE_PASSWORD_DOC = "The store password for the key store file. "
+ "This is optional for client and only needed if 'ssl.keystore.location' is configured. "
+ "Key store password is not supported for PEM format.";
public static final String SSL_KEY_PASSWORD_CONFIG = "ssl.key.password";
public static final String SSL_KEY_PASSWORD_DOC = "The password of the private key in the key store file or "
+ "the PEM key specified in 'ssl.keystore.key'.";
public static final String SSL_TRUSTSTORE_TYPE_CONFIG = "ssl.truststore.type";
public static final String SSL_TRUSTSTORE_TYPE_DOC = "The file format of the trust store file. The values currently supported by the default <code>ssl.engine.factory.class</code> are [JKS, PKCS12, PEM].";
public static final String DEFAULT_SSL_TRUSTSTORE_TYPE = "JKS";
public static final String SSL_TRUSTSTORE_LOCATION_CONFIG = "ssl.truststore.location";
public static final String SSL_TRUSTSTORE_LOCATION_DOC = "The location of the trust store file.";
public static final String SSL_TRUSTSTORE_PASSWORD_CONFIG = "ssl.truststore.password";
public static final String SSL_TRUSTSTORE_PASSWORD_DOC = "The password for the trust store file. "
+ "If a password is not set, trust store file configured will still be used, but integrity checking is disabled. "
+ "Trust store password is not supported for PEM format.";
public static final String SSL_KEYMANAGER_ALGORITHM_CONFIG = "ssl.keymanager.algorithm";
public static final String SSL_KEYMANAGER_ALGORITHM_DOC = "The algorithm used by key manager factory for SSL connections. "
+ "Default value is the key manager factory algorithm configured for the Java Virtual Machine.";
public static final String DEFAULT_SSL_KEYMANGER_ALGORITHM = KeyManagerFactory.getDefaultAlgorithm();
public static final String SSL_TRUSTMANAGER_ALGORITHM_CONFIG = "ssl.trustmanager.algorithm";
public static final String SSL_TRUSTMANAGER_ALGORITHM_DOC = "The algorithm used by trust manager factory for SSL connections. "
+ "Default value is the trust manager factory algorithm configured for the Java Virtual Machine.";
public static final String DEFAULT_SSL_TRUSTMANAGER_ALGORITHM = TrustManagerFactory.getDefaultAlgorithm();
public static final String SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG = "ssl.endpoint.identification.algorithm";
public static final String SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_DOC = "The endpoint identification algorithm to validate server hostname using server certificate. ";
public static final String DEFAULT_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM = "https";
public static final String SSL_SECURE_RANDOM_IMPLEMENTATION_CONFIG = "ssl.secure.random.implementation";
public static final String SSL_SECURE_RANDOM_IMPLEMENTATION_DOC = "The SecureRandom PRNG implementation to use for SSL cryptography operations. ";
public static final String SSL_ENGINE_FACTORY_CLASS_CONFIG = "ssl.engine.factory.class";
public static final String SSL_ENGINE_FACTORY_CLASS_DOC = "The | SslConfigs |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/PostProcessedMockUserDetailsService.java | {
"start": 819,
"end": 1396
} | class ____ implements UserDetailsService {
private String postProcessorWasHere;
public PostProcessedMockUserDetailsService() {
this.postProcessorWasHere = "Post processor hasn't been yet";
}
public String getPostProcessorWasHere() {
return this.postProcessorWasHere;
}
public void setPostProcessorWasHere(String postProcessorWasHere) {
this.postProcessorWasHere = postProcessorWasHere;
}
@Override
public UserDetails loadUserByUsername(String username) {
throw new UnsupportedOperationException("Not for actual use");
}
}
| PostProcessedMockUserDetailsService |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/select/MySqlSelectTest_286.java | {
"start": 880,
"end": 1499
} | class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "SELECT a " +
"FROM (VALUES 1) t(a) " +
"GROUP BY DISTINCT a GROUPING SETS ((), (t.a))";
SQLStatement stmt = SQLUtils
.parseSingleStatement(sql, DbType.mysql, SQLParserFeature.SupportUnicodeCodePoint);
assertEquals("SELECT a\n" +
"FROM (\n" +
"\tVALUES (1)\n" +
") AS t (a)\n" +
"GROUP BY DISTINCT a\n" +
"\tGROUPING SETS ((), (t.a))", stmt.toString());
}
}
| MySqlSelectTest_286 |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_2800/Issue2866.java | {
"start": 584,
"end": 716
} | class ____{
@JSONField(name="A1")
int a1;
int A2;
@JSONField(name="A3")
public int a3;
}
}
| A |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/JavaType.java | {
"start": 894,
"end": 2261
} | interface ____ abstract class, so instantiation
* may not be possible.
*/
protected final Class<?> _class;
protected final int _hash;
/**
* Optional handler (codec) that can be attached to indicate
* what to use for handling (serializing, deserializing) values of
* this specific type.
*<p>
* Note: untyped (i.e. caller has to cast) because it is used for
* different kinds of handlers, with unrelated types.
*/
protected final Object _valueHandler;
/**
* Optional handler that can be attached to indicate how to handle
* additional type metadata associated with this type.
*<p>
* Note: untyped (i.e. caller has to cast) because it is used for
* different kinds of handlers, with unrelated types.
*/
protected final Object _typeHandler;
/**
* Whether entities defined with this type should be handled using
* static typing (as opposed to dynamic runtime type) or not.
*/
protected final boolean _asStatic;
/*
/**********************************************************************
/* Life-cycle: constructors, public mutant factory methods
/**********************************************************************
*/
/**
* Main base constructor for sub-classes to use
*
* @param raw "Raw" (type-erased) | or |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/immutable/Party.java | {
"start": 221,
"end": 1031
} | class ____ implements Serializable {
private long id;
private long version;
private Contract contract;
private String name;
private Set infos = new HashSet();
public Party() {
super();
}
public Party(String name) {
this.name = name;
}
public long getVersion() {
return version;
}
public void setVersion(long version) {
this.version = version;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public Contract getContract() {
return contract;
}
public void setContract(Contract contract) {
this.contract = contract;
}
public Set getInfos() {
return infos;
}
public void setInfos(Set infos) {
this.infos = infos;
}
}
| Party |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/examples/VirtualThreadExamples.java | {
"start": 649,
"end": 3775
} | class ____ {
public void gettingStarted(Vertx vertx) {
AbstractVerticle verticle = new AbstractVerticle() {
@Override
public void start() {
HttpClient client = vertx.createHttpClient();
HttpClientRequest req = client.request(
HttpMethod.GET,
8080,
"localhost",
"/").await();
HttpClientResponse resp = req.send().await();
int status = resp.statusCode();
Buffer body = resp.body().await();
}
};
// Run the verticle a on virtual thread
vertx.deployVerticle(verticle, new DeploymentOptions().setThreadingModel(ThreadingModel.VIRTUAL_THREAD));
}
private int counter;
public void fieldVisibility1() {
int value = counter;
value += getRemoteValue().await();
// the counter value might have changed
counter = value;
}
public void fieldVisibility2() {
counter += getRemoteValue().await();
}
private Future<Buffer> callRemoteService() {
return null;
}
private Future<Integer> getRemoteValue() {
return null;
}
public void deployVerticle(Vertx vertx, int port) {
vertx.deployVerticle(() -> new AbstractVerticle() {
HttpServer server;
@Override
public void start() {
server = vertx
.createHttpServer()
.requestHandler(req -> {
Buffer res;
try {
res = callRemoteService().await();
} catch (Exception e) {
req.response().setStatusCode(500).end();
return;
}
req.response().end(res);
});
server.listen(port).await();
}
}, new DeploymentOptions()
.setThreadingModel(ThreadingModel.VIRTUAL_THREAD));
}
public void awaitingFutures1(HttpClientResponse response) {
Buffer body = response.body().await();
}
public void awaitingFutures2(HttpClientResponse response, CompletionStage<Buffer> completionStage) {
Buffer body = Future.fromCompletionStage(completionStage).await();
}
public void blockingStream(HttpServer server) {
server.requestHandler(request -> {
Stream<Buffer> blockingStream = request.blockingStream();
HttpServerResponse response = request.response();
response.setChunked(true);
blockingStream
.map(buff -> "" + buff.length())
.forEach(size -> response.write(size));
response.end();
});
}
private Future<String> getRemoteString() {
return null;
}
public void awaitingMultipleFutures() {
Future<String> f1 = getRemoteString();
Future<Integer> f2 = getRemoteValue();
CompositeFuture res = Future.all(f1, f2).await();
String v1 = res.resultAt(0);
Integer v2 = res.resultAt(1);
}
public void threadLocalSupport1(String userId, HttpClient client) {
ThreadLocal<String> local = new ThreadLocal();
local.set(userId);
HttpClientRequest req = client.request(HttpMethod.GET, 8080, "localhost", "/").await();
HttpClientResponse resp = req.send().await();
// Thread local remains the same since it's the same virtual thread
}
}
| VirtualThreadExamples |
java | playframework__playframework | core/play/src/main/java/play/mvc/BodyParser.java | {
"start": 23443,
"end": 25504
} | class ____<A> implements BodyParser<A> {
private final long maxLength;
private final HttpErrorHandler errorHandler;
protected MaxLengthBodyParser(long maxLength, HttpErrorHandler errorHandler) {
this.maxLength = maxLength;
this.errorHandler = errorHandler;
}
CompletionStage<F.Either<Result, A>> requestEntityTooLarge(Http.RequestHeader request) {
return errorHandler
.onClientError(request, Status.REQUEST_ENTITY_TOO_LARGE, "Request entity too large")
.thenApply(F.Either::Left);
}
@Override
public Accumulator<ByteString, F.Either<Result, A>> apply(Http.RequestHeader request) {
Flow<ByteString, ByteString, Future<MaxSizeStatus>> takeUpToFlow =
Flow.fromGraph(play.api.mvc.BodyParsers$.MODULE$.takeUpTo(maxLength));
if (BodyParserUtils.contentLengthHeaderExceedsMaxLength(request.asScala(), maxLength)) {
return Accumulator.done(requestEntityTooLarge(request));
} else {
Sink<ByteString, CompletionStage<F.Either<Result, A>>> result = apply1(request).toSink();
return Accumulator.fromSink(
takeUpToFlow.toMat(
result,
(statusFuture, resultFuture) ->
FutureConverters.asJava(statusFuture)
.thenCompose(
status -> {
if (status instanceof MaxSizeNotExceeded$) {
return resultFuture;
} else {
return requestEntityTooLarge(request);
}
})));
}
}
/**
* Implement this method to implement the actual body parser.
*
* @param request header for the request to parse
* @return the accumulator that parses the request
*/
protected abstract Accumulator<ByteString, F.Either<Result, A>> apply1(
Http.RequestHeader request);
}
/** A body parser that first buffers */
abstract | MaxLengthBodyParser |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/customproviders/ImpliedReadBodyRequestFilterTest.java | {
"start": 3337,
"end": 4246
} | class ____ {
@WithFormRead
@ServerRequestFilter
public void addSuffix(ResteasyReactiveContainerRequestContext containerRequestContext) {
ResteasyReactiveRequestContext rrContext = (ResteasyReactiveRequestContext) containerRequestContext
.getServerRequestContext();
if (containerRequestContext.getMethod().equals("POST")) {
String nameFormParam = (String) rrContext.getFormParameter("name", true, false);
if (nameFormParam != null) {
containerRequestContext.getHeaders().putSingle("suffix", "!".repeat(nameFormParam.length()));
} else {
containerRequestContext.getHeaders().putSingle("suffix", "?");
}
} else {
containerRequestContext.getHeaders().putSingle("suffix", "!");
}
}
}
}
| Filters |
java | spring-projects__spring-boot | module/spring-boot-devtools/src/main/java/org/springframework/boot/devtools/restart/classloader/ClassLoaderFileURLStreamHandler.java | {
"start": 1360,
"end": 1902
} | class ____ extends URLConnection {
Connection(URL url) {
super(url);
}
@Override
public void connect() throws IOException {
}
@Override
public InputStream getInputStream() throws IOException {
byte[] contents = ClassLoaderFileURLStreamHandler.this.file.getContents();
Assert.state(contents != null, "'contents' must not be null");
return new ByteArrayInputStream(contents);
}
@Override
public long getLastModified() {
return ClassLoaderFileURLStreamHandler.this.file.getLastModified();
}
}
}
| Connection |
java | google__auto | value/src/test/java/com/google/auto/value/processor/AutoValueCompilationTest.java | {
"start": 81774,
"end": 81959
} | class ____ {",
" abstract String blim();",
" abstract ImmutableList<String> blam();",
"",
" @AutoValue.Builder",
" public | Baz |
java | spring-projects__spring-security | config/src/main/java/org/springframework/security/config/http/ChannelAttributeFactory.java | {
"start": 1723,
"end": 2466
} | class ____ {
private static final String OPT_REQUIRES_HTTP = "http";
private static final String OPT_REQUIRES_HTTPS = "https";
private static final String OPT_ANY_CHANNEL = "any";
private ChannelAttributeFactory() {
}
public static List<ConfigAttribute> createChannelAttributes(String requiredChannel) {
String channelConfigAttribute = switch (requiredChannel) {
case OPT_REQUIRES_HTTPS -> "REQUIRES_SECURE_CHANNEL";
case OPT_REQUIRES_HTTP -> "REQUIRES_INSECURE_CHANNEL";
case OPT_ANY_CHANNEL -> ChannelDecisionManagerImpl.ANY_CHANNEL;
default -> throw new BeanCreationException("Unknown channel attribute " + requiredChannel);
};
return SecurityConfig.createList(channelConfigAttribute);
}
}
| ChannelAttributeFactory |
java | google__auto | value/src/it/functional/src/test/java/com/google/auto/value/AutoValueTest.java | {
"start": 94521,
"end": 94719
} | class ____<K, V> extends HashMap<K, V> {
private static final long serialVersionUID = 1L;
public MyMap() {}
public MyMap(Map<K, V> map) {
super(map);
}
}
public static | MyMap |
java | apache__thrift | lib/js/test/src/test/Httpd.java | {
"start": 9826,
"end": 12324
} | class ____ extends Thread {
private final ServerSocket serversocket;
private final HttpParams params;
private final HttpService httpService;
public RequestListenerThread(int port, final String docroot) throws IOException {
this.serversocket = new ServerSocket(port);
this.params = new BasicHttpParams();
this.params.setIntParameter(CoreConnectionPNames.SO_TIMEOUT, 1000).setIntParameter(CoreConnectionPNames.SOCKET_BUFFER_SIZE, 8 * 1024)
.setBooleanParameter(CoreConnectionPNames.STALE_CONNECTION_CHECK, false).setBooleanParameter(CoreConnectionPNames.TCP_NODELAY, true)
.setParameter(CoreProtocolPNames.ORIGIN_SERVER, "HttpComponents/1.1");
// Set up the HTTP protocol processor
HttpProcessor httpproc = new BasicHttpProcessor();
// Set up request handlers
HttpRequestHandlerRegistry reqistry = new HttpRequestHandlerRegistry();
reqistry.register("*", new HttpFileHandler(docroot));
// Set up the HTTP service
this.httpService = new HttpService(httpproc, new NoConnectionReuseStrategy(), new DefaultHttpResponseFactory());
this.httpService.setParams(this.params);
this.httpService.setHandlerResolver(reqistry);
}
public void run() {
System.out.println("Listening on port " + this.serversocket.getLocalPort());
System.out.println("Point your browser to http://localhost:8088/test/test.html");
while (!Thread.interrupted()) {
try {
// Set up HTTP connection
Socket socket = this.serversocket.accept();
DefaultHttpServerConnection conn = new DefaultHttpServerConnection();
System.out.println("Incoming connection from " + socket.getInetAddress());
conn.bind(socket, this.params);
// Start worker thread
Thread t = new WorkerThread(this.httpService, conn);
t.setDaemon(true);
t.start();
} catch (InterruptedIOException ex) {
break;
} catch (IOException e) {
System.err.println("I/O error initialising connection thread: " + e.getMessage());
break;
}
}
}
}
static | RequestListenerThread |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/search/suggest/completion/RegexOptionsTests.java | {
"start": 747,
"end": 2847
} | class ____ extends ESTestCase {
private static final int NUMBER_OF_RUNS = 20;
public static RegexOptions randomRegexOptions() {
final RegexOptions.Builder builder = RegexOptions.builder();
maybeSet(builder::setMaxDeterminizedStates, randomIntBetween(1, 1000));
StringBuilder sb = new StringBuilder();
for (RegexpFlag regexpFlag : RegexpFlag.values()) {
if (randomBoolean()) {
if (sb.length() != 0) {
sb.append("|");
}
sb.append(regexpFlag.name());
}
}
maybeSet(builder::setFlags, sb.toString());
return builder.build();
}
protected RegexOptions createMutation(RegexOptions original) throws IOException {
final RegexOptions.Builder builder = RegexOptions.builder();
builder.setMaxDeterminizedStates(randomValueOtherThan(original.getMaxDeterminizedStates(), () -> randomIntBetween(1, 10)));
return builder.build();
}
/**
* Test serialization and deserialization
*/
public void testSerialization() throws IOException {
for (int i = 0; i < NUMBER_OF_RUNS; i++) {
RegexOptions testOptions = randomRegexOptions();
RegexOptions deserializedModel = copyWriteable(
testOptions,
new NamedWriteableRegistry(Collections.emptyList()),
RegexOptions::new
);
assertEquals(testOptions, deserializedModel);
assertEquals(testOptions.hashCode(), deserializedModel.hashCode());
assertNotSame(testOptions, deserializedModel);
}
}
public void testIllegalArgument() {
final RegexOptions.Builder builder = RegexOptions.builder();
try {
builder.setMaxDeterminizedStates(-randomIntBetween(1, Integer.MAX_VALUE));
fail("max determinized state must be positive");
} catch (IllegalArgumentException e) {
assertEquals(e.getMessage(), "maxDeterminizedStates must not be negative");
}
}
}
| RegexOptionsTests |
java | google__gson | gson/src/test/java/com/google/gson/functional/TypeAdapterRuntimeTypeWrapperTest.java | {
"start": 6380,
"end": 6471
} | class ____ {
@SuppressWarnings("unused")
CyclicBase f;
}
private static | CyclicBase |
java | apache__camel | test-infra/camel-test-infra-weaviate/src/main/java/org/apache/camel/test/infra/weaviate/services/WeaviateInfraService.java | {
"start": 984,
"end": 1146
} | interface ____ extends InfrastructureService {
String getWeaviateEndpointUrl();
String getWeaviateHost();
int getWeaviatePort();
}
| WeaviateInfraService |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/jaxb/internal/stax/LocalXmlResourceResolver.java | {
"start": 639,
"end": 6827
} | class ____ implements javax.xml.stream.XMLResolver {
public static final String CLASSPATH_EXTENSION_URL_BASE = "classpath://";
private final ResourceStreamLocator resourceStreamLocator;
public LocalXmlResourceResolver(ResourceStreamLocator resourceStreamLocator) {
this.resourceStreamLocator = resourceStreamLocator;
}
@Override
public Object resolveEntity(String publicID, String systemID, String baseURI, String namespace) throws XMLStreamException {
JAXB_LOGGER.resolveEntityInvocation( publicID, systemID, baseURI, namespace );
if ( namespace != null ) {
JAXB_LOGGER.interpretingNamespace( namespace );
if ( MappingXsdSupport.latestDescriptor().getNamespaceUri().matches( namespace ) ) {
return openUrlStream( MappingXsdSupport.latestDescriptor() );
}
if ( MappingXsdSupport.jpa10.getNamespaceUri().matches( namespace ) ) {
// JPA 1.0 and 2.0 share the same namespace URI
return openUrlStream( MappingXsdSupport.jpa10 );
}
else if ( MappingXsdSupport.jpa21.getNamespaceUri().matches( namespace ) ) {
// JPA 2.1 and 2.2 share the same namespace URI
return openUrlStream( MappingXsdSupport.jpa21 );
}
else if ( MappingXsdSupport.jpa30.getNamespaceUri().matches( namespace ) ) {
return openUrlStream( MappingXsdSupport.jpa30 );
}
else if ( MappingXsdSupport.jpa31.getNamespaceUri().matches( namespace ) ) {
return openUrlStream( MappingXsdSupport.jpa31 );
}
else if ( MappingXsdSupport.jpa32.getNamespaceUri().matches( namespace ) ) {
return openUrlStream( MappingXsdSupport.jpa32 );
}
else if ( ConfigXsdSupport.getJPA10().getNamespaceUri().matches( namespace ) ) {
// JPA 1.0 and 2.0 share the same namespace URI
return openUrlStream( ConfigXsdSupport.getJPA10() );
}
else if ( ConfigXsdSupport.getJPA21().getNamespaceUri().matches( namespace ) ) {
// JPA 2.1 and 2.2 share the same namespace URI
return openUrlStream( ConfigXsdSupport.getJPA21() );
}
else if ( ConfigXsdSupport.getJPA30().getNamespaceUri().matches( namespace ) ) {
return openUrlStream( ConfigXsdSupport.getJPA30() );
}
else if ( ConfigXsdSupport.getJPA31().getNamespaceUri().matches( namespace ) ) {
return openUrlStream( ConfigXsdSupport.getJPA31() );
}
else if ( MappingXsdSupport.hibernateMappingXml.getNamespaceUri().matches( namespace ) ) {
return openUrlStream( MappingXsdSupport.hibernateMappingXml );
}
else if ( MappingXsdSupport.hbmXml.getNamespaceUri().matches( namespace ) ) {
return openUrlStream( MappingXsdSupport.hbmXml );
}
else if ( ConfigXsdSupport.cfgXsd().getNamespaceUri().matches( namespace ) ) {
return openUrlStream( ConfigXsdSupport.cfgXsd() );
}
}
if ( publicID != null || systemID != null ) {
JAXB_LOGGER.checkingDtdReferences( publicID, systemID );
if ( MAPPING_DTD.matches( publicID, systemID ) ) {
return openUrlStream( MAPPING_DTD.localSchemaUrl );
}
if ( ALTERNATE_MAPPING_DTD.matches( publicID, systemID ) ) {
return openUrlStream( ALTERNATE_MAPPING_DTD.localSchemaUrl );
}
if ( LEGACY_MAPPING_DTD.matches( publicID, systemID ) ) {
DEPRECATION_LOGGER.recognizedObsoleteHibernateNamespace( LEGACY_MAPPING_DTD.getIdentifierBase(), MAPPING_DTD.getIdentifierBase() );
return openUrlStream( MAPPING_DTD.localSchemaUrl );
}
if ( CFG_DTD.matches( publicID, systemID ) ) {
return openUrlStream( CFG_DTD.localSchemaUrl );
}
if ( ALTERNATE_CFG_DTD.matches( publicID, systemID ) ) {
return openUrlStream( ALTERNATE_CFG_DTD.localSchemaUrl );
}
if ( LEGACY_CFG_DTD.matches( publicID, systemID ) ) {
DEPRECATION_LOGGER.recognizedObsoleteHibernateNamespace( LEGACY_CFG_DTD.getIdentifierBase(), CFG_DTD.getIdentifierBase() );
return openUrlStream( CFG_DTD.localSchemaUrl );
}
}
if ( systemID != null ) {
// technically, "classpath://..." identifiers should only be declared as SYSTEM identifiers
if ( systemID.startsWith( CLASSPATH_EXTENSION_URL_BASE ) ) {
JAXB_LOGGER.recognizedClasspathIdentifierAttemptingToResolve( systemID );
final String path = systemID.substring( CLASSPATH_EXTENSION_URL_BASE.length() );
// todo : for this to truly work consistently, we need access to ClassLoaderService
final InputStream stream = resolveInLocalNamespace( path );
if ( stream == null ) {
JAXB_LOGGER.unableToResolveOnClasspath( systemID );
}
else {
JAXB_LOGGER.resolvedOnClasspath( systemID );
}
return stream;
}
}
return null;
}
private InputStream openUrlStream(XsdDescriptor xsdDescriptor) {
return openUrlStream( LocalSchemaLocator.resolveLocalSchemaUrl( xsdDescriptor.getLocalResourceName() ) );
}
private InputStream openUrlStream(URL url) {
try {
return url.openStream();
}
catch (IOException e) {
throw new XmlInfrastructureException( "Could not open url stream : " + url.toExternalForm(), e );
}
}
private InputStream resolveInLocalNamespace(String path) {
try {
return resourceStreamLocator.locateResourceStream( path );
}
catch ( Throwable t ) {
return null;
}
}
public static final DtdDescriptor MAPPING_DTD = new DtdDescriptor(
"www.hibernate.org/dtd/hibernate-mapping",
"org/hibernate/hibernate-mapping-3.0.dtd"
);
public static final DtdDescriptor ALTERNATE_MAPPING_DTD = new DtdDescriptor(
"hibernate.org/dtd/hibernate-mapping",
"org/hibernate/hibernate-mapping-3.0.dtd"
);
public static final DtdDescriptor LEGACY_MAPPING_DTD = new DtdDescriptor(
"hibernate.sourceforge.net/hibernate-mapping",
"org/hibernate/hibernate-mapping-3.0.dtd"
);
public static final DtdDescriptor CFG_DTD = new DtdDescriptor(
"www.hibernate.org/dtd/hibernate-configuration",
"org/hibernate/hibernate-configuration-3.0.dtd"
);
public static final DtdDescriptor ALTERNATE_CFG_DTD = new DtdDescriptor(
"hibernate.org/dtd/hibernate-configuration",
"org/hibernate/hibernate-configuration-3.0.dtd"
);
public static final DtdDescriptor LEGACY_CFG_DTD = new DtdDescriptor(
"hibernate.sourceforge.net/hibernate-configuration",
"org/hibernate/hibernate-configuration-3.0.dtd"
);
public static | LocalXmlResourceResolver |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYear.java | {
"start": 770,
"end": 1247
} | class ____ extends DateTimeFunction {
public DayOfYear(Source source, Expression field, ZoneId zoneId) {
super(source, field, zoneId, DateTimeExtractor.DAY_OF_YEAR);
}
@Override
protected NodeCtor2<Expression, ZoneId, BaseDateTimeFunction> ctorForInfo() {
return DayOfYear::new;
}
@Override
protected UnaryScalarFunction replaceChild(Expression newChild) {
return new DayOfYear(source(), newChild, zoneId());
}
}
| DayOfYear |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/operators/AbstractStreamOperatorFactory.java | {
"start": 1425,
"end": 1544
} | interface ____ enables stream operators to access {@link
* ProcessingTimeService}.
*/
@Experimental
public abstract | which |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/suggest/phrase/SmoothingModel.java | {
"start": 1732,
"end": 3713
} | class ____ override hashCode in the same way that we
* force them to override equals. This also prevents false positives in
* CheckStyle's EqualsHashCode check.
*/
return doHashCode();
}
protected abstract int doHashCode();
public static SmoothingModel fromXContent(XContentParser parser) throws IOException {
XContentParser.Token token;
String fieldName = null;
SmoothingModel model = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (LinearInterpolation.PARSE_FIELD.match(fieldName, parser.getDeprecationHandler())) {
model = LinearInterpolation.fromXContent(parser);
} else if (Laplace.PARSE_FIELD.match(fieldName, parser.getDeprecationHandler())) {
model = Laplace.fromXContent(parser);
} else if (StupidBackoff.PARSE_FIELD.match(fieldName, parser.getDeprecationHandler())) {
model = StupidBackoff.fromXContent(parser);
} else {
throw new IllegalArgumentException("suggester[phrase] doesn't support object field [" + fieldName + "]");
}
} else {
throw new ParsingException(
parser.getTokenLocation(),
"[smoothing] unknown token [" + token + "] after [" + fieldName + "]"
);
}
}
return model;
}
public abstract WordScorerFactory buildWordScorerFactory();
/**
* subtype specific implementation of "equals".
*/
protected abstract boolean doEquals(SmoothingModel other);
protected abstract XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException;
}
| to |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/stats/WatcherStatsResponse.java | {
"start": 1392,
"end": 3048
} | class ____ extends BaseNodesResponse<WatcherStatsResponse.Node> implements ToXContentObject {
private final WatcherMetadata watcherMetadata;
public WatcherStatsResponse(
ClusterName clusterName,
WatcherMetadata watcherMetadata,
List<Node> nodes,
List<FailedNodeException> failures
) {
super(clusterName, nodes, failures);
this.watcherMetadata = watcherMetadata;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
TransportAction.localOnly();
}
@Override
protected List<Node> readNodesFrom(StreamInput in) throws IOException {
return TransportAction.localOnly();
}
@Override
protected void writeNodesTo(StreamOutput out, List<Node> nodes) throws IOException {
TransportAction.localOnly();
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
ChunkedToXContent.wrapAsToXContent(watcherMetadata).toXContent(builder, params);
builder.startArray("stats");
for (Node node : getNodes()) {
node.toXContent(builder, params);
}
builder.endArray();
return builder;
}
/**
* Sum all watches across all nodes to get a total count of watches in the cluster
*
* @return The sum of all watches being executed
*/
public long getWatchesCount() {
return getNodes().stream().mapToLong(WatcherStatsResponse.Node::getWatchesCount).sum();
}
public WatcherMetadata watcherMetadata() {
return watcherMetadata;
}
public static | WatcherStatsResponse |
java | google__dagger | javatests/dagger/hilt/android/EarlyEntryPointNoEntryPointsDefinedTest.java | {
"start": 2001,
"end": 2710
} | class ____ {
@Inject
Foo() {}
}
@Rule public HiltAndroidRule rule = new HiltAndroidRule(this);
@Test
public void testEarlyComponentDoesNotExist() throws Exception {
HiltTestApplication app = (HiltTestApplication) getApplicationContext();
TestApplicationComponentManager componentManager =
(TestApplicationComponentManager) app.componentManager();
RuntimeException exception =
assertThrows(RuntimeException.class, () -> componentManager.earlySingletonComponent());
assertThat(exception)
.hasMessageThat()
.contains(
"The EarlyComponent was requested but does not exist. Check that you have "
+ "annotated your test | Foo |
java | apache__kafka | connect/runtime/src/test/resources/test-plugins/bad-packaging/test/plugins/OuterClass.java | {
"start": 1038,
"end": 1260
} | class ____ testing classloading isolation.
* See {@link org.apache.kafka.connect.runtime.isolation.TestPlugins}.
* <p>Defines a connector as a non-static inner class, which does not have a default constructor.
*/
public | for |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java | {
"start": 993,
"end": 2726
} | class ____ extends BaseRestHandler {
private static final Logger LOGGER = LogManager.getLogger(RestEsqlQueryAction.class);
@Override
public String getName() {
return "esql_query";
}
@Override
public List<Route> routes() {
return List.of(new Route(POST, "/_query"));
}
@Override
public Set<String> supportedCapabilities() {
return EsqlCapabilities.CAPABILITIES;
}
@Override
protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
try (XContentParser parser = request.contentOrSourceParamParser()) {
return restChannelConsumer(RequestXContent.parseSync(parser), request, client);
}
}
protected static RestChannelConsumer restChannelConsumer(EsqlQueryRequest esqlRequest, RestRequest request, NodeClient client) {
final Boolean partialResults = request.paramAsBoolean("allow_partial_results", null);
if (partialResults != null) {
esqlRequest.allowPartialResults(partialResults);
}
LOGGER.debug("Beginning execution of ESQL query.\nQuery string: [{}]", esqlRequest.query());
return channel -> {
RestCancellableNodeClient cancellableClient = new RestCancellableNodeClient(client, request.getHttpChannel());
cancellableClient.execute(
EsqlQueryAction.INSTANCE,
esqlRequest,
new EsqlResponseListener(channel, request, esqlRequest).wrapWithLogging()
);
};
}
@Override
protected Set<String> responseParams() {
return Set.of(URL_PARAM_DELIMITER, EsqlQueryResponse.DROP_NULL_COLUMNS_OPTION);
}
}
| RestEsqlQueryAction |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/aot/hint/BindingReflectionHintsRegistrarTests.java | {
"start": 16677,
"end": 17043
} | class ____ {
private String name;
public static Builder newInstance() {
return new Builder();
}
public Builder name(String name) {
this.name = name;
return this;
}
public SampleRecordWithJacksonCustomStrategy build() {
return new SampleRecordWithJacksonCustomStrategy(name);
}
}
}
@SuppressWarnings("serial")
static | Builder |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/context/metrics/buffering/BufferedStartupStep.java | {
"start": 2820,
"end": 3141
} | class ____ implements Tag {
private final String key;
private final String value;
DefaultTag(String key, String value) {
this.key = key;
this.value = value;
}
@Override
public String getKey() {
return this.key;
}
@Override
public String getValue() {
return this.value;
}
}
}
| DefaultTag |
java | spring-projects__spring-security | web/src/main/java/org/springframework/security/web/authentication/logout/LogoutFilter.java | {
"start": 2185,
"end": 6036
} | class ____ extends GenericFilterBean {
private SecurityContextHolderStrategy securityContextHolderStrategy = SecurityContextHolder
.getContextHolderStrategy();
private RequestMatcher logoutRequestMatcher;
private final LogoutHandler handler;
private final LogoutSuccessHandler logoutSuccessHandler;
/**
* Constructor which takes a <tt>LogoutSuccessHandler</tt> instance to determine the
* target destination after logging out. The list of <tt>LogoutHandler</tt>s are
* intended to perform the actual logout functionality (such as clearing the security
* context, invalidating the session, etc.).
*/
@SuppressWarnings("NullAway") // Dataflow analysis limitation
public LogoutFilter(LogoutSuccessHandler logoutSuccessHandler, LogoutHandler... handlers) {
this.handler = new CompositeLogoutHandler(handlers);
Assert.notNull(logoutSuccessHandler, "logoutSuccessHandler cannot be null");
this.logoutSuccessHandler = logoutSuccessHandler;
setFilterProcessesUrl("/logout");
}
@SuppressWarnings("NullAway") // Dataflow analysis limitation
public LogoutFilter(String logoutSuccessUrl, LogoutHandler... handlers) {
this.handler = new CompositeLogoutHandler(handlers);
Assert.isTrue(!StringUtils.hasLength(logoutSuccessUrl) || UrlUtils.isValidRedirectUrl(logoutSuccessUrl),
() -> logoutSuccessUrl + " isn't a valid redirect URL");
SimpleUrlLogoutSuccessHandler urlLogoutSuccessHandler = new SimpleUrlLogoutSuccessHandler();
if (StringUtils.hasText(logoutSuccessUrl)) {
urlLogoutSuccessHandler.setDefaultTargetUrl(logoutSuccessUrl);
}
this.logoutSuccessHandler = urlLogoutSuccessHandler;
setFilterProcessesUrl("/logout");
}
@Override
public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
throws IOException, ServletException {
doFilter((HttpServletRequest) request, (HttpServletResponse) response, chain);
}
private void doFilter(HttpServletRequest request, HttpServletResponse response, FilterChain chain)
throws IOException, ServletException {
if (requiresLogout(request, response)) {
Authentication auth = this.securityContextHolderStrategy.getContext().getAuthentication();
if (this.logger.isDebugEnabled()) {
this.logger.debug(LogMessage.format("Logging out [%s]", auth));
}
this.handler.logout(request, response, auth);
this.logoutSuccessHandler.onLogoutSuccess(request, response, auth);
return;
}
chain.doFilter(request, response);
}
/**
* Allow subclasses to modify when a logout should take place.
* @param request the request
* @param response the response
* @return <code>true</code> if logout should occur, <code>false</code> otherwise
*/
protected boolean requiresLogout(HttpServletRequest request, HttpServletResponse response) {
if (this.logoutRequestMatcher.matches(request)) {
return true;
}
if (this.logger.isTraceEnabled()) {
this.logger.trace(LogMessage.format("Did not match request to %s", this.logoutRequestMatcher));
}
return false;
}
/**
* Sets the {@link SecurityContextHolderStrategy} to use. The default action is to use
* the {@link SecurityContextHolderStrategy} stored in {@link SecurityContextHolder}.
*
* @since 5.8
*/
public void setSecurityContextHolderStrategy(SecurityContextHolderStrategy securityContextHolderStrategy) {
Assert.notNull(securityContextHolderStrategy, "securityContextHolderStrategy cannot be null");
this.securityContextHolderStrategy = securityContextHolderStrategy;
}
public void setLogoutRequestMatcher(RequestMatcher logoutRequestMatcher) {
Assert.notNull(logoutRequestMatcher, "logoutRequestMatcher cannot be null");
this.logoutRequestMatcher = logoutRequestMatcher;
}
public void setFilterProcessesUrl(String filterProcessesUrl) {
this.logoutRequestMatcher = pathPattern(filterProcessesUrl);
}
}
| LogoutFilter |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/onetoone/unidirectional/Unidirectional.java | {
"start": 713,
"end": 4290
} | class ____ {
private Integer ed1_id;
private Integer ed2_id;
private Integer ed3_id;
private Integer ed4_id;
private Integer ing1_id;
private Integer ing2_id;
@BeforeClassTemplate
public void initData(EntityManagerFactoryScope scope) {
// Revision 1
scope.inTransaction( em -> {
UniRefEdEntity ed1 = new UniRefEdEntity( 1, "data_ed_1" );
UniRefEdEntity ed2 = new UniRefEdEntity( 2, "data_ed_2" );
UniRefEdEntity ed3 = new UniRefEdEntity( 3, "data_ed_2" );
UniRefEdEntity ed4 = new UniRefEdEntity( 4, "data_ed_2" );
UniRefIngEntity ing1 = new UniRefIngEntity( 5, "data_ing_1", ed1 );
UniRefIngEntity ing2 = new UniRefIngEntity( 6, "data_ing_2", ed3 );
em.persist( ed1 );
em.persist( ed2 );
em.persist( ed3 );
em.persist( ed4 );
em.persist( ing1 );
em.persist( ing2 );
ed1_id = ed1.getId();
ed2_id = ed2.getId();
ed3_id = ed3.getId();
ed4_id = ed4.getId();
ing1_id = ing1.getId();
ing2_id = ing2.getId();
} );
// Revision 2
scope.inTransaction( em -> {
UniRefIngEntity ing1 = em.find( UniRefIngEntity.class, ing1_id );
UniRefEdEntity ed2 = em.find( UniRefEdEntity.class, ed2_id );
ing1.setReference( ed2 );
} );
// Revision 3
scope.inTransaction( em -> {
UniRefIngEntity ing2 = em.find( UniRefIngEntity.class, ing2_id );
UniRefEdEntity ed4 = em.find( UniRefEdEntity.class, ed4_id );
ing2.setReference( ed4 );
} );
}
@Test
public void testRevisionsCounts(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
assertEquals( Arrays.asList( 1 ), auditReader.getRevisions( UniRefEdEntity.class, ed1_id ) );
assertEquals( Arrays.asList( 1 ), auditReader.getRevisions( UniRefEdEntity.class, ed2_id ) );
assertEquals( Arrays.asList( 1 ), auditReader.getRevisions( UniRefEdEntity.class, ed3_id ) );
assertEquals( Arrays.asList( 1 ), auditReader.getRevisions( UniRefEdEntity.class, ed4_id ) );
assertEquals( Arrays.asList( 1, 2 ), auditReader.getRevisions( UniRefIngEntity.class, ing1_id ) );
assertEquals( Arrays.asList( 1, 3 ), auditReader.getRevisions( UniRefIngEntity.class, ing2_id ) );
} );
}
@Test
public void testHistoryOfIngId1(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
UniRefEdEntity ed1 = em.find( UniRefEdEntity.class, ed1_id );
UniRefEdEntity ed2 = em.find( UniRefEdEntity.class, ed2_id );
UniRefIngEntity rev1 = auditReader.find( UniRefIngEntity.class, ing1_id, 1 );
UniRefIngEntity rev2 = auditReader.find( UniRefIngEntity.class, ing1_id, 2 );
UniRefIngEntity rev3 = auditReader.find( UniRefIngEntity.class, ing1_id, 3 );
assertEquals( ed1, rev1.getReference() );
assertEquals( ed2, rev2.getReference() );
assertEquals( ed2, rev3.getReference() );
} );
}
@Test
public void testHistoryOfIngId2(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
UniRefEdEntity ed3 = em.find( UniRefEdEntity.class, ed3_id );
UniRefEdEntity ed4 = em.find( UniRefEdEntity.class, ed4_id );
UniRefIngEntity rev1 = auditReader.find( UniRefIngEntity.class, ing2_id, 1 );
UniRefIngEntity rev2 = auditReader.find( UniRefIngEntity.class, ing2_id, 2 );
UniRefIngEntity rev3 = auditReader.find( UniRefIngEntity.class, ing2_id, 3 );
assertEquals( ed3, rev1.getReference() );
assertEquals( ed3, rev2.getReference() );
assertEquals( ed4, rev3.getReference() );
} );
}
}
| Unidirectional |
java | quarkusio__quarkus | devtools/cli/src/main/java/io/quarkus/cli/image/Docker.java | {
"start": 791,
"end": 1962
} | class ____ extends BaseImageSubCommand {
private static final String DOCKER = "docker";
private static final String DOCKER_CONFIG_PREFIX = "quarkus.docker.";
private static final String DOCKERFILE_JVM_PATH = "dockerfile-jvm-path";
private static final String DOCKERFILE_NATIVE_PATH = "dockerfile-native-path";
@CommandLine.Option(order = 7, names = { "--dockerfile" }, description = "The path to the Dockerfile.")
public Optional<String> dockerFile;
@Override
public void populateContext(BuildToolContext context) {
Map<String, String> properties = context.getPropertiesOptions().properties;
properties.put(QUARKUS_CONTAINER_IMAGE_BUILDER, DOCKER);
dockerFile.ifPresent(d -> properties.put(
DOCKER_CONFIG_PREFIX + (context.getBuildOptions().buildNative ? DOCKERFILE_NATIVE_PATH : DOCKERFILE_JVM_PATH),
d));
context.getForcedExtensions().add(QUARKUS_CONTAINER_IMAGE_EXTENSION_KEY_PREFIX + DOCKER);
}
@Override
public String toString() {
return "Docker {imageOptions='" + imageOptions + "', dockerFile:'" + dockerFile.orElse("<none>") + "'}";
}
}
| Docker |
java | resilience4j__resilience4j | resilience4j-spring-cloud2/src/test/java/io/github/resilience4j/ratelimiter/autoconfigure/RefreshScopedRateLimiterConfigurationTest.java | {
"start": 2088,
"end": 2172
} | class ____ extends RefreshScopedRateLimiterAutoConfiguration {
}
} | RateLimiterConfig |
java | apache__camel | components/camel-jetty-common/src/main/java/org/apache/camel/component/jetty/JettyHttpComponent.java | {
"start": 5889,
"end": 60599
} | class ____ {
final Server server;
final Connector connector;
final CamelServlet servlet;
int refCount;
ConnectorRef(Server server, Connector connector, CamelServlet servlet) {
this.server = server;
this.connector = connector;
this.servlet = servlet;
increment();
}
public int increment() {
return ++refCount;
}
public int decrement() {
return --refCount;
}
public int getRefCount() {
return refCount;
}
}
@Override
protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
// must extract well known parameters before we create the endpoint
List<Handler> handlerList = resolveAndRemoveReferenceListParameter(parameters, "handlers", Handler.class);
HttpBinding binding = resolveAndRemoveReferenceParameter(parameters, "httpBindingRef", HttpBinding.class);
if (binding != null) {
// TODO: remove httpBindingRef in the future
LOG.warn("Using httpBindingRef is deprecated, use httpBinding=#beanId instead");
}
Boolean enableJmx = getAndRemoveParameter(parameters, "enableJmx", Boolean.class);
Boolean enableMultipartFilter = getAndRemoveParameter(parameters, "enableMultipartFilter",
Boolean.class, true);
Filter multipartFilter = resolveAndRemoveReferenceParameter(parameters, "multipartFilterRef", Filter.class);
if (binding != null) {
// TODO: remove multipartFilterRef in the future
LOG.warn("Using multipartFilterRef is deprecated, use multipartFilter=#beanId instead");
}
List<Filter> filters = resolveAndRemoveReferenceListParameter(parameters, "filters", Filter.class);
Boolean enableCors = getAndRemoveParameter(parameters, "enableCORS", Boolean.class, false);
HeaderFilterStrategy headerFilterStrategy
= resolveAndRemoveReferenceParameter(parameters, "headerFilterStrategy", HeaderFilterStrategy.class);
SSLContextParameters sslContextParameters
= resolveAndRemoveReferenceParameter(parameters, "sslContextParameters", SSLContextParameters.class);
SSLContextParameters ssl = sslContextParameters != null ? sslContextParameters : this.sslContextParameters;
ssl = ssl != null ? ssl : retrieveGlobalSslContextParameters();
String proxyHost = getAndRemoveParameter(parameters, "proxyHost", String.class, getProxyHost());
Integer proxyPort = getAndRemoveParameter(parameters, "proxyPort", Integer.class, getProxyPort());
Boolean async = getAndRemoveParameter(parameters, "async", Boolean.class);
boolean muteException = getAndRemoveParameter(parameters, "muteException", boolean.class, isMuteException());
String filesLocation = getAndRemoveParameter(parameters, "filesLocation", String.class, getFilesLocation());
Integer fileSizeThreshold
= getAndRemoveParameter(parameters, "fileSizeThreshold", Integer.class, getFileSizeThreshold());
Long maxFileSize = getAndRemoveParameter(parameters, "maxFileSize", Long.class, getMaxFileSize());
Long maxRequestSize = getAndRemoveParameter(parameters, "maxRequestSize", Long.class, getMaxRequestSize());
// extract filterInit. parameters
Map filterInitParameters = PropertiesHelper.extractProperties(parameters, "filterInit.");
URI addressUri = new URI(UnsafeUriCharactersEncoder.encodeHttpURI(remaining));
URI endpointUri = URISupport.createRemainingURI(addressUri, parameters);
// need to keep the httpMethodRestrict parameter for the endpointUri
String httpMethodRestrict = getAndRemoveParameter(parameters, "httpMethodRestrict", String.class);
// restructure uri to be based on the parameters left as we dont want to include the Camel internal options
URI httpUri = URISupport.createRemainingURI(addressUri, parameters);
// create endpoint after all known parameters have been extracted from parameters
// include component scheme in the uri
String scheme = StringHelper.before(uri, ":");
endpointUri = new URI(scheme + ":" + endpointUri);
JettyHttpEndpoint endpoint = createEndpoint(endpointUri, httpUri);
if (async != null) {
endpoint.setAsync(async);
}
endpoint.setMuteException(muteException);
if (headerFilterStrategy != null) {
endpoint.setHeaderFilterStrategy(headerFilterStrategy);
} else {
setEndpointHeaderFilterStrategy(endpoint);
}
// setup the proxy host and proxy port
if (proxyHost != null) {
endpoint.setProxyHost(proxyHost);
endpoint.setProxyPort(proxyPort);
}
if (!filterInitParameters.isEmpty()) {
endpoint.setFilterInitParameters(filterInitParameters);
}
if (!handlerList.isEmpty()) {
endpoint.setHandlers(handlerList);
}
// prefer to use endpoint configured over component configured
if (binding == null) {
// fallback to component configured
binding = getHttpBinding();
}
if (binding != null) {
endpoint.setHttpBinding(binding);
}
if (enableJmx != null) {
endpoint.setEnableJmx(enableJmx);
} else {
// set this option based on setting of JettyHttpComponent
endpoint.setEnableJmx(isEnableJmx());
}
endpoint.setEnableMultipartFilter(enableMultipartFilter);
if (multipartFilter != null) {
endpoint.setMultipartFilter(multipartFilter);
endpoint.setEnableMultipartFilter(true);
}
if (enableCors) {
endpoint.setEnableCORS(enableCors);
if (filters == null) {
filters = new ArrayList<>(1);
}
filters.add(new CrossOriginFilter());
}
if (filters != null) {
endpoint.setFilters(filters);
}
if (httpMethodRestrict != null) {
endpoint.setHttpMethodRestrict(httpMethodRestrict);
}
if (ssl != null) {
endpoint.setSslContextParameters(ssl);
}
endpoint.setSendServerVersion(isSendServerVersion());
endpoint.setFilesLocation(filesLocation);
endpoint.setFileSizeThreshold(fileSizeThreshold);
endpoint.setMaxFileSize(maxFileSize);
endpoint.setMaxRequestSize(maxRequestSize);
setProperties(endpoint, parameters);
// re-create http uri after all parameters has been set on the endpoint, as the remainders are for http uri
httpUri = URISupport.createRemainingURI(addressUri, parameters);
endpoint.setHttpUri(httpUri);
return endpoint;
}
/**
 * Creates the concrete Jetty endpoint for the given URIs.
 *
 * @param endpointUri the full endpoint URI, including the component scheme
 * @param httpUri     the remaining http URI after component parameters were extracted
 * @return the created endpoint
 * @throws URISyntaxException if either URI is malformed
 */
protected abstract JettyHttpEndpoint createEndpoint(URI endpointUri, URI httpUri) throws URISyntaxException;
/**
 * Determines whether the given consumer may be connected: a consumer is rejected
 * only when another consumer from a different CamelContext is already registered
 * on the same context-path of the shared connector.
 */
@Override
public boolean canConnect(HttpConsumer consumer) throws Exception {
    JettyHttpEndpoint endpoint = (JettyHttpEndpoint) consumer.getEndpoint();
    ConnectorRef ref = CONNECTORS.get(getConnectorKey(endpoint));
    if (ref == null) {
        // no connector exists yet for this protocol/host/port, so nothing can clash
        return true;
    }
    CamelContext ownContext = consumer.getEndpoint().getCamelContext();
    for (HttpConsumer existing : ref.servlet.getConsumers().values()) {
        boolean samePath = consumer.getPath().equals(existing.getPath());
        if (samePath && existing.getEndpoint().getCamelContext() != ownContext) {
            // same context-path owned by another CamelContext -> refuse
            return false;
        }
    }
    return true;
}
/**
 * Connects the URL specified on the endpoint to the specified processor.
 */
@Override
public void connect(HttpConsumer consumer) throws Exception {
    // Make sure that there is a connector for the requested endpoint.
    JettyHttpEndpoint endpoint = (JettyHttpEndpoint) consumer.getEndpoint();
    String key = getConnectorKey(endpoint);
    try {
        // create-or-update the shared connector atomically per connector key
        CONNECTORS.compute(key, (k, ref) -> {
            try {
                return connect(consumer, endpoint, k, ref);
            } catch (Exception e) {
                // tunnel the checked exception out of the lambda
                throw new RuntimeCamelException(e);
            }
        });
    } catch (RuntimeCamelException e) {
        // unwrap and rethrow the original checked exception
        throw (Exception) e.getCause();
    }
}
/**
 * Creates or reuses the shared {@code ConnectorRef} (Jetty server + connector + servlet)
 * for the given connector key, and registers the consumer on its servlet.
 * Invoked from {@code CONNECTORS.compute(...)}, so it runs atomically per key.
 *
 * @param connectorRef the existing ref for the key, or {@code null} when none exists yet
 * @return the (new or reused) ref to store back in the CONNECTORS map
 */
private ConnectorRef connect(
        HttpConsumer consumer, JettyHttpEndpoint endpoint, String connectorKey, ConnectorRef connectorRef)
        throws Exception {
    if (connectorRef == null) {
        // first consumer for this protocol/host/port: boot a new Jetty server
        Server server = createServer();
        Connector connector = getConnector(server, endpoint);
        if ("localhost".equalsIgnoreCase(endpoint.getHttpUri().getHost())) {
            LOG.warn("You use localhost interface! It means that no external connections will be available. "
                     + "Don't you want to use 0.0.0.0 instead (all network interfaces)? {}",
                    endpoint);
        }
        if (endpoint.isEnableJmx()) {
            enableJmx(server);
        }
        server.addConnector(connector);
        connectorRef = new ConnectorRef(
                server, connector,
                createServletForConnector(server, connector, endpoint.getHandlers(), endpoint));
        // must enable session before we start
        if (endpoint.isSessionSupport()) {
            enableSessionSupport(connectorRef.server, connectorKey);
        }
        connectorRef.server.start();
        LOG.debug("Adding connector key: {} -> {}", connectorKey, connectorRef);
    } else {
        LOG.debug("Using existing connector key: {} -> {}", connectorKey, connectorRef);
        // check if there are any new handlers, and if so then we need to re-start the server
        if (endpoint.getHandlers() != null && !endpoint.getHandlers().isEmpty()) {
            List<Handler> existingHandlers = new ArrayList<>();
            if (connectorRef.server.getHandlers() != null && !connectorRef.server.getHandlers().isEmpty()) {
                existingHandlers = connectorRef.server.getHandlers();
            }
            List<Handler> newHandlers = new ArrayList<>(endpoint.getHandlers());
            // NOTE(review): "changed" is true only when NEITHER list contains the other;
            // an endpoint whose handlers are a strict superset of the server's does not
            // trigger a restart — confirm this asymmetry is intended
            boolean changed = !existingHandlers.containsAll(newHandlers) && !newHandlers.containsAll(existingHandlers);
            if (changed) {
                LOG.debug("Restarting Jetty server due to adding new Jetty Handlers: {}", newHandlers);
                connectorRef.server.stop();
                addJettyHandlers(connectorRef.server, endpoint.getHandlers());
                connectorRef.server.start();
            }
        }
        // check the session support
        if (endpoint.isSessionSupport()) {
            enableSessionSupport(connectorRef.server, connectorKey);
        }
        // ref track the connector
        connectorRef.increment();
    }
    if (endpoint.isEnableMultipartFilter()) {
        enableMultipartFilter(endpoint, connectorRef.server);
    }
    if (endpoint.getFilters() != null && !endpoint.getFilters().isEmpty()) {
        setFilters(endpoint, connectorRef.server);
    }
    connectorRef.servlet.connect(consumer);
    return connectorRef;
}
/**
 * Registers the server as an MBean when a JMX MBeanContainer is available.
 */
private void enableJmx(Server server) {
    if (getMbContainer() == null) {
        // JMX disabled in the CamelContext; nothing to register
        return;
    }
    LOG.info("Jetty JMX Extensions is enabled");
    addServerMBean(server);
    // Since we may have many Servers running, don't tie the MBeanContainer
    // to a Server lifecycle or we end up closing it while it is still in use.
    //server.addBean(mbContainer);
}
/**
 * Installs a SessionHandler on the servlet context if one is not already present.
 * Fails when the server was already started, since sessions must be enabled first.
 */
private void enableSessionSupport(Server server, String connectorKey) {
    ServletContextHandler context = server.getDescendant(ServletContextHandler.class);
    if (context.getSessionHandler() != null) {
        // sessions already enabled, nothing to do
        return;
    }
    if (context.isStarted()) {
        throw new IllegalStateException(
                "Server has already been started. Cannot enabled sessionSupport on " + connectorKey);
    }
    context.setSessionHandler(new SessionHandler());
}
/**
 * Registers each endpoint-configured filter on the servlet context, propagating
 * the endpoint's filter init-parameters to every filter.
 */
private void setFilters(JettyHttpEndpoint endpoint, Server server) {
    ServletContextHandler context = server.getDescendant(ServletContextHandler.class);
    for (Filter filter : endpoint.getFilters()) {
        FilterHolder holder = new FilterHolder();
        if (endpoint.getFilterInitParameters() != null) {
            holder.setInitParameters(endpoint.getFilterInitParameters());
        }
        addFilter(endpoint, filter, holder, context);
    }
}
// Thin delegate that reorders arguments for the path-computing addFilter overload below.
private void addFilter(
        JettyHttpEndpoint endpoint, Filter filter, FilterHolder filterHolder, ServletContextHandler context) {
    addFilter(endpoint, filterHolder, filter, context);
}
// Registers the holder for the given path spec on the context's servlet handler
// (dispatch type 0 — see Jetty's addFilterWithMapping).
private void addFilter(ServletContextHandler context, FilterHolder filterHolder, String pathSpec) {
    context.getServletHandler().addFilterWithMapping(filterHolder, pathSpec, 0);
}
/**
 * Registers a multipart filter on the servlet context so multipart/form-data
 * requests are handled. Uses the endpoint-configured filter when present,
 * otherwise Jetty's default MultiPartFilter. When the {@code TMP_DIR} global
 * option is set, it must point to an existing directory used for uploads.
 *
 * @throws RuntimeCamelException if the configured temp directory does not exist
 */
private void enableMultipartFilter(HttpCommonEndpoint endpoint, Server server) throws Exception {
    ServletContextHandler context = server.getDescendant(ServletContextHandler.class);
    CamelContext camelContext = this.getCamelContext();
    FilterHolder filterHolder = new FilterHolder();
    // uploaded files are temporary; remove them once the request completes
    filterHolder.setInitParameter("deleteFiles", "true");
    // look up the option once (the original read it via two different accessors)
    String tmpDir = camelContext.getGlobalOption(TMP_DIR);
    if (ObjectHelper.isNotEmpty(tmpDir)) {
        File file = new File(tmpDir);
        if (!file.isDirectory()) {
            // fixed: previous message was ungrammatical ("is not exists")
            throw new RuntimeCamelException(
                    "The temp file directory of camel-jetty does not exist, please recheck it with directory name: "
                    + tmpDir);
        }
        context.setAttribute("jakarta.servlet.context.tempdir", file);
    }
    // if a filter ref was provided, use it.
    Filter filter = ((JettyHttpEndpoint) endpoint).getMultipartFilter();
    if (filter == null) {
        // if no filter ref was provided, use the default filter
        filter = new MultiPartFilter();
    }
    final String pathSpec = addFilter(endpoint, filterHolder, filter, context);
    LOG.debug("using multipart filter implementation {} for path {}", filter.getClass().getName(), pathSpec);
}
/**
 * Wraps the filter in a CamelFilterWrapper, computes the servlet path spec from
 * the endpoint (defaulting to "/", appending a wildcard for prefix matching),
 * registers it on the context, and returns the path spec used.
 */
private String addFilter(
        HttpCommonEndpoint endpoint, FilterHolder filterHolder, Filter filter, ServletContextHandler context) {
    filterHolder.setFilter(new CamelFilterWrapper(filter));
    String spec = endpoint.getPath();
    if (spec == null || spec.isEmpty()) {
        spec = "/";
    }
    if (endpoint.isMatchOnUriPrefix()) {
        // match everything under the path, avoiding a double slash
        if (spec.endsWith("/")) {
            spec = spec + "*";
        } else {
            spec = spec + "/*";
        }
    }
    addFilter(context, filterHolder, spec);
    return spec;
}
/**
 * Disconnects the URL specified on the endpoint from the specified processor.
 */
@Override
public void disconnect(HttpConsumer consumer) throws Exception {
    // If the connector is not needed anymore then stop it
    HttpCommonEndpoint endpoint = consumer.getEndpoint();
    String key = getConnectorKey(endpoint);
    try {
        // update (or remove, when the ref count drops to zero) atomically per key
        CONNECTORS.computeIfPresent(key, (k, ref) -> {
            try {
                return disconnect(consumer, ref);
            } catch (Exception e) {
                // tunnel the checked exception out of the lambda
                throw new RuntimeCamelException(e);
            }
        });
    } catch (RuntimeCamelException e) {
        // unwrap and rethrow the original checked exception
        throw (Exception) e.getCause();
    }
}
/**
 * Removes the consumer from the shared connector; when it was the last consumer
 * (ref count reaches 0) the Jetty server is stopped and disposed. Returning
 * {@code null} removes the mapping from the CONNECTORS map (this method is
 * invoked from {@code CONNECTORS.computeIfPresent(...)}).
 */
private ConnectorRef disconnect(HttpConsumer consumer, ConnectorRef connectorRef) throws Exception {
    connectorRef.servlet.disconnect(consumer);
    if (connectorRef.decrement() == 0) {
        // last consumer gone: tear down connector then server, in that order
        connectorRef.server.removeConnector(connectorRef.connector);
        connectorRef.connector.stop();
        connectorRef.server.stop();
        // Camel controls the lifecycle of these entities so remove the
        // registered MBeans when Camel is done with the managed objects.
        if (mbContainer != null) {
            this.removeServerMBean(connectorRef.server);
            //mbContainer.removeBean(connectorRef.connector);
        }
        if (defaultQueuedThreadPool != null) {
            try {
                defaultQueuedThreadPool.stop();
            } catch (Exception t) {
                // stop failed; destroy as best-effort cleanup
                defaultQueuedThreadPool.destroy();
            } finally {
                defaultQueuedThreadPool = null;
            }
        }
        return null;
    }
    return connectorRef;
}
/**
 * Builds the key identifying a shared connector: {@code protocol:host:port}.
 */
private String getConnectorKey(HttpCommonEndpoint endpoint) {
    return new StringBuilder()
            .append(endpoint.getProtocol()).append(':')
            .append(endpoint.getHttpUri().getHost()).append(':')
            .append(endpoint.getPort())
            .toString();
}
// Properties
// -------------------------------------------------------------------------
/**
 * Returns the custom SecureRequestCustomizer, or {@code null} when none was set.
 */
public SecureRequestCustomizer getSecureRequestCustomizer() {
    return secureRequestCustomizer;
}
/**
 * To use a custom SecureRequestCustomizer. The option is a org.eclipse.jetty.server.SecureRequestCustomizer type.
 */
@Metadata(description = "To use a custom SecureRequestCustomizer. The option is a org.eclipse.jetty.server.SecureRequestCustomizer type.",
          label = "advanced")
public void setSecureRequestCustomizer(SecureRequestCustomizer secureRequestCustomizer) {
    this.secureRequestCustomizer = secureRequestCustomizer;
}
/**
 * Returns the configured SSL key password, or {@code null} when none was set.
 */
public String getSslKeyPassword() {
    return sslKeyPassword;
}
/**
 * The key password, which is used to access the certificate's key entry in the keystore (this is the same password
 * that is supplied to the keystore command's -keypass option).
 */
@Metadata(description = "The key password, which is used to access the certificate's key entry in the keystore "
                        + "(this is the same password that is supplied to the keystore command's -keypass option).",
          label = "security", secret = true)
public void setSslKeyPassword(String sslKeyPassword) {
    this.sslKeyPassword = sslKeyPassword;
}
/**
 * Returns the configured SSL keystore password, or {@code null} when none was set.
 */
public String getSslPassword() {
    return sslPassword;
}
/**
 * The ssl password, which is required to access the keystore file (this is the same password that is supplied to
 * the keystore command's -storepass option).
 */
@Metadata(description = "The ssl password, which is required to access the keystore file (this is the same password that is supplied to the keystore command's -storepass option).",
          label = "security", secret = true)
public void setSslPassword(String sslPassword) {
    this.sslPassword = sslPassword;
}
/**
 * Specifies the location of the Java keystore file, which contains the Jetty server's own X.509 certificate in a
 * key entry.
 */
@Metadata(description = "Specifies the location of the Java keystore file, which contains the Jetty server's own X.509 certificate in a key entry.",
          label = "security")
public void setKeystore(String sslKeystore) {
    this.sslKeystore = sslKeystore;
}
/**
 * Returns the configured keystore location, or {@code null} when none was set.
 */
public String getKeystore() {
    return sslKeystore;
}
/**
 * Returns the custom Jetty ErrorHandler, or {@code null} when none was set.
 */
public ErrorHandler getErrorHandler() {
    return errorHandler;
}
/**
 * This option is used to set the ErrorHandler that Jetty server uses.
 */
@Metadata(description = "This option is used to set the ErrorHandler that Jetty server uses.", label = "advanced")
public void setErrorHandler(ErrorHandler errorHandler) {
    this.errorHandler = errorHandler;
}
/**
 * Selects an SSL or plain connector for the endpoint based on its protocol.
 */
protected Connector getConnector(Server server, JettyHttpEndpoint endpoint) {
    return "https".equals(endpoint.getProtocol())
            ? getSslSocketConnector(server, endpoint)
            : getSocketConnector(server, endpoint);
}
/**
 * Returns the per-port configured HTTP connector when one exists for the
 * endpoint's port, otherwise creates a new connector.
 */
protected Connector getSocketConnector(Server server, JettyHttpEndpoint endpoint) {
    if (socketConnectors != null) {
        Connector existing = socketConnectors.get(endpoint.getPort());
        if (existing != null) {
            return existing;
        }
    }
    return createConnector(server, endpoint);
}
/**
 * Returns the per-port configured SSL connector when one exists for the
 * endpoint's port, otherwise creates a new connector.
 */
protected Connector getSslSocketConnector(Server server, JettyHttpEndpoint endpoint) {
    if (sslSocketConnectors != null) {
        Connector existing = sslSocketConnectors.get(endpoint.getPort());
        if (existing != null) {
            return existing;
        }
    }
    return createConnector(server, endpoint);
}
/**
 * Creates a new connector for the endpoint. For https endpoints the SSL context
 * factory is built from the endpoint's SSLContextParameters when present,
 * otherwise from the JETTY_SSL_* system properties with the component-level
 * ssl* fields as fallback.
 */
protected Connector createConnector(Server server, JettyHttpEndpoint endpoint) {
    // now we just use the SelectChannelConnector as the default connector
    SslContextFactory.Server sslcf = null;
    // Note that this was set on the endpoint when it was constructed. It was
    // either explicitly set at the component or on the endpoint, but either way,
    // the value is already set. We therefore do not need to look at the component
    // level SSLContextParameters again in this method.
    SSLContextParameters endpointSslContextParameters = endpoint.getSslContextParameters();
    if (endpointSslContextParameters != null) {
        try {
            sslcf = (SslContextFactory.Server) createSslContextFactory(endpointSslContextParameters, false);
        } catch (Exception e) {
            throw new RuntimeCamelException(e);
        }
    } else if ("https".equals(endpoint.getProtocol())) {
        sslcf = new SslContextFactory.Server();
        // disable endpoint identification (host name verification) on the server side
        sslcf.setEndpointIdentificationAlgorithm(null);
        // system properties take precedence over component-level configuration
        String keystoreProperty = System.getProperty(JETTY_SSL_KEYSTORE);
        if (keystoreProperty != null) {
            sslcf.setKeyStorePath(keystoreProperty);
        } else if (sslKeystore != null) {
            sslcf.setKeyStorePath(sslKeystore);
        }
        String keystorePassword = System.getProperty(JETTY_SSL_KEYPASSWORD);
        if (keystorePassword != null) {
            sslcf.setKeyManagerPassword(keystorePassword);
        } else if (sslKeyPassword != null) {
            sslcf.setKeyManagerPassword(sslKeyPassword);
        }
        String password = System.getProperty(JETTY_SSL_PASSWORD);
        if (password != null) {
            sslcf.setKeyStorePassword(password);
        } else if (sslPassword != null) {
            sslcf.setKeyStorePassword(sslPassword);
        }
    }
    return createConnectorJettyInternal(server, endpoint, sslcf);
}
/**
 * Creates the actual Jetty connector for the server and endpoint.
 *
 * @param sslcf the SSL context factory to use, or {@code null} for plain http
 */
protected abstract AbstractConnector createConnectorJettyInternal(
        Server server, JettyHttpEndpoint endpoint, SslContextFactory.Server sslcf);
/**
 * Builds a Jetty SslContextFactory (server or client flavour) from the given
 * Camel SSLContextParameters, applying cipher-suite and protocol
 * include/exclude filters when configured. With a {@code null} ssl argument an
 * unconfigured factory of the requested flavour is returned.
 */
private SslContextFactory createSslContextFactory(SSLContextParameters ssl, boolean client)
        throws GeneralSecurityException, IOException {
    SslContextFactory factory = client ? new SslContextFactory.Client() : new SslContextFactory.Server();
    if (ssl == null) {
        return factory;
    }
    factory.setSslContext(ssl.createSSLContext(getCamelContext()));
    // jetty default is
    // addExcludeProtocols("SSL", "SSLv2", "SSLv2Hello", "SSLv3");
    // setExcludeCipherSuites("^.*_(MD5|SHA|SHA1)$");
    // configure include/exclude ciphers and protocols
    if (ssl.getCipherSuitesFilter() != null) {
        List<String> include = ssl.getCipherSuitesFilter().getInclude();
        if (include == null || include.isEmpty()) {
            // no explicit includes: accept everything
            factory.setIncludeCipherSuites(".*");
        } else {
            factory.setIncludeCipherSuites(include.toArray(new String[0]));
        }
        List<String> exclude = ssl.getCipherSuitesFilter().getExclude();
        if (exclude != null && !exclude.isEmpty()) {
            factory.setExcludeCipherSuites(exclude.toArray(new String[0]));
        }
    }
    if (ssl.getSecureSocketProtocolsFilter() != null) {
        List<String> include = ssl.getSecureSocketProtocolsFilter().getInclude();
        if (include == null || include.isEmpty()) {
            // no explicit includes: accept everything
            factory.setIncludeProtocols(".*");
        } else {
            factory.setIncludeProtocols(include.toArray(new String[0]));
        }
        List<String> exclude = ssl.getSecureSocketProtocolsFilter().getExclude();
        if (exclude != null && !exclude.isEmpty()) {
            factory.setExcludeProtocols(exclude.toArray(new String[0]));
        }
    }
    return factory;
}
/**
 * Invokes a {@code checkConfig()} method reflectively on the given instance and
 * returns its boolean result; returns {@code false} when the method is absent
 * or cannot be invoked.
 */
protected boolean checkSSLContextFactoryConfig(Object instance) {
    try {
        return (Boolean) instance.getClass().getMethod("checkConfig").invoke(instance);
    } catch (IllegalArgumentException | NoSuchMethodException | IllegalAccessException
             | InvocationTargetException ignored) {
        // no usable checkConfig method on this instance: treat as not configured
        return false;
    }
}
/**
 * Returns the per-port SSL connector map, or {@code null} when none was set.
 */
public Map<Integer, Connector> getSslSocketConnectors() {
    return sslSocketConnectors;
}
/**
 * A map which contains per port number specific SSL connectors.
 */
@Metadata(description = "A map which contains per port number specific SSL connectors.", label = "security")
public void setSslSocketConnectors(Map<Integer, Connector> connectors) {
    sslSocketConnectors = connectors;
}
/**
 * A map which contains per port number specific HTTP connectors. Uses the same principle as sslSocketConnectors.
 */
@Metadata(description = "A map which contains per port number specific HTTP connectors. Uses the same principle as sslSocketConnectors.",
          label = "security")
public void setSocketConnectors(Map<Integer, Connector> socketConnectors) {
    this.socketConnectors = socketConnectors;
}
/**
 * Returns the per-port HTTP connector map, or {@code null} when none was set.
 */
public Map<Integer, Connector> getSocketConnectors() {
    return socketConnectors;
}
/**
 * Returns the configured minimum thread pool size, or {@code null} when unset.
 */
public Integer getMinThreads() {
    return minThreads;
}
/**
 * To set a value for minimum number of threads in server thread pool. Notice that both a min and max size must be
 * configured.
 */
@Metadata(description = "To set a value for minimum number of threads in server thread pool. Notice that both a min and max size must be configured.",
          label = "consumer")
public void setMinThreads(Integer minThreads) {
    this.minThreads = minThreads;
}
/**
 * Returns the configured maximum thread pool size, or {@code null} when unset.
 */
public Integer getMaxThreads() {
    return maxThreads;
}
/**
 * To set a value for maximum number of threads in server thread pool. Notice that both a min and max size must be
 * configured.
 */
@Metadata(description = "To set a value for maximum number of threads in server thread pool. Notice that both a min and max size must be configured.",
          label = "consumer")
public void setMaxThreads(Integer maxThreads) {
    this.maxThreads = maxThreads;
}
/**
 * Returns the custom server thread pool, or {@code null} when none was set.
 */
public ThreadPool getThreadPool() {
    return threadPool;
}
/**
 * To use a custom thread pool for the server. This option should only be used in special circumstances.
 */
@Metadata(description = "To use a custom thread pool for the server. This option should only be used in special circumstances.",
          label = "consumer,advanced")
public void setThreadPool(ThreadPool threadPool) {
    this.threadPool = threadPool;
}
/**
 * Returns whether Jetty JMX support is enabled at the component level.
 */
public boolean isEnableJmx() {
    return enableJmx;
}
/**
 * If this option is true, Jetty JMX support will be enabled for this endpoint.
 */
@Metadata(description = "If this option is true, Jetty JMX support will be enabled for this endpoint.")
public void setEnableJmx(boolean enableJmx) {
    this.enableJmx = enableJmx;
}
}
/**
* Not to be used - use JettyHttpBinding instead.
*/
@Override
@Metadata(description = "Not to be used - use JettyHttpBinding instead.", label = "advanced")
public void setHttpBinding(HttpBinding httpBinding) {
throw new IllegalArgumentException("Not to be used - use JettyHttpBinding instead.");
}
/**
* Jetty component does not use HttpConfiguration.
*/
@Override
@Metadata(description = "Jetty component does not use HttpConfiguration.", label = "advanced")
public void setHttpConfiguration(HttpConfiguration httpConfiguration) {
throw new IllegalArgumentException("Jetty component does not use HttpConfiguration.");
}
/**
 * Returns the MBeanContainer used for Jetty JMX registration, lazily creating
 * one from the CamelContext management agent's MBeanServer when not yet set.
 * Creation is guarded by {@code lock}, so this is safe to call concurrently.
 * May return {@code null} when JMX is disabled in the CamelContext.
 */
public MBeanContainer getMbContainer() {
    lock.lock();
    try {
        // If null, provide the default implementation.
        if (mbContainer == null) {
            MBeanServer mbs = null;
            final ManagementStrategy mStrategy = this.getCamelContext().getManagementStrategy();
            final ManagementAgent mAgent = mStrategy.getManagementAgent();
            if (mAgent != null) {
                mbs = mAgent.getMBeanServer();
            }
            if (mbs != null) {
                mbContainer = new MBeanContainer(mbs);
            } else {
                // no MBeanServer available: leave mbContainer null and warn once per call
                LOG.warn("JMX disabled in CamelContext. Jetty JMX extensions will remain disabled.");
            }
        }
        return this.mbContainer;
    } finally {
        lock.unlock();
    }
}
/**
 * To use a existing configured org.eclipse.jetty.jmx.MBeanContainer if JMX is enabled that Jetty uses for
 * registering mbeans.
 */
@Metadata(description = "To use a existing configured org.eclipse.jetty.jmx.MBeanContainer if JMX is enabled that Jetty uses for registering mbeans.",
          label = "advanced")
public void setMbContainer(MBeanContainer mbContainer) {
    this.mbContainer = mbContainer;
}
/**
 * Returns the general SSL connector properties, or {@code null} when unset.
 */
public Map<String, Object> getSslSocketConnectorProperties() {
    return sslSocketConnectorProperties;
}
/**
 * A map which contains general SSL connector properties.
 */
@Metadata(description = "A map which contains general SSL connector properties.", label = "security")
public void setSslSocketConnectorProperties(Map<String, Object> sslSocketConnectorProperties) {
    this.sslSocketConnectorProperties = sslSocketConnectorProperties;
}
/**
 * Returns the general HTTP connector properties, or {@code null} when unset.
 */
public Map<String, Object> getSocketConnectorProperties() {
    return socketConnectorProperties;
}
/**
 * A map which contains general HTTP connector properties. Uses the same principle as sslSocketConnectorProperties.
 */
@Metadata(description = "A map which contains general HTTP connector properties. Uses the same principle as sslSocketConnectorProperties.",
          label = "security")
public void setSocketConnectorProperties(Map<String, Object> socketConnectorProperties) {
    this.socketConnectorProperties = socketConnectorProperties;
}
/**
 * Adds a single general HTTP connector property, lazily creating the map.
 */
public void addSocketConnectorProperty(String key, Object value) {
    Map<String, Object> props = socketConnectorProperties;
    if (props == null) {
        props = new HashMap<>();
        socketConnectorProperties = props;
    }
    props.put(key, value);
}
/**
 * Adds a single general SSL connector property, lazily creating the map.
 */
public void addSslSocketConnectorProperty(String key, Object value) {
    Map<String, Object> props = sslSocketConnectorProperties;
    if (props == null) {
        props = new HashMap<>();
        sslSocketConnectorProperties = props;
    }
    props.put(key, value);
}
/**
 * Returns the continuation timeout in millis, or {@code null} when unset.
 */
public Long getContinuationTimeout() {
    return continuationTimeout;
}
/**
 * Allows to set a timeout in millis when using Jetty as consumer (server). By default Jetty uses 30000. You can use
 * a value of <= 0 to never expire. If a timeout occurs then the request will be expired and Jetty will return back
 * a http error 503 to the client. This option is only in use when using Jetty with the Asynchronous Routing Engine.
 */
@Metadata(description = "Allows to set a timeout in millis when using Jetty as consumer (server)."
                        + " By default Jetty uses 30000. You can use a value of <= 0 to never expire."
                        + " If a timeout occurs then the request will be expired and Jetty will return back a http error 503 to the client."
                        + " This option is only in use when using Jetty with the Asynchronous Routing Engine.",
          defaultValue = "30000", label = "consumer")
public void setContinuationTimeout(Long continuationTimeout) {
    this.continuationTimeout = continuationTimeout;
}
/**
 * Returns whether Jetty continuations are used for the Jetty server.
 */
public boolean isUseContinuation() {
    return useContinuation;
}
/**
 * Whether or not to use Jetty continuations for the Jetty Server.
 */
@Metadata(description = "Whether or not to use Jetty continuations for the Jetty Server.", defaultValue = "true",
          label = "consumer")
public void setUseContinuation(boolean useContinuation) {
    this.useContinuation = useContinuation;
}
/**
 * Returns the component-level SSLContextParameters, or {@code null} when unset.
 */
public SSLContextParameters getSslContextParameters() {
    return sslContextParameters;
}
/**
 * To configure security using SSLContextParameters
 */
@Metadata(description = "To configure security using SSLContextParameters", label = "security")
public void setSslContextParameters(SSLContextParameters sslContextParameters) {
    this.sslContextParameters = sslContextParameters;
}
/**
 * Returns whether global SSL context parameters are used.
 */
@Override
public boolean isUseGlobalSslContextParameters() {
    return this.useGlobalSslContextParameters;
}
/**
 * Enable usage of global SSL context parameters
 */
@Override
@Metadata(description = "Enable usage of global SSL context parameters", label = "security", defaultValue = "false")
public void setUseGlobalSslContextParameters(boolean useGlobalSslContextParameters) {
    this.useGlobalSslContextParameters = useGlobalSslContextParameters;
}
/**
 * Returns the custom response buffer size, or {@code null} when unset.
 */
public Integer getResponseBufferSize() {
    return responseBufferSize;
}
/**
 * Allows to configure a custom value of the response buffer size on the Jetty connectors.
 */
@Metadata(description = "Allows to configure a custom value of the response buffer size on the Jetty connectors.")
public void setResponseBufferSize(Integer responseBufferSize) {
    this.responseBufferSize = responseBufferSize;
}
/**
 * Returns the custom request buffer size, or {@code null} when unset.
 */
public Integer getRequestBufferSize() {
    return requestBufferSize;
}
/**
 * Allows to configure a custom value of the request buffer size on the Jetty connectors.
 */
@Metadata(description = "Allows to configure a custom value of the request buffer size on the Jetty connectors.")
public void setRequestBufferSize(Integer requestBufferSize) {
    this.requestBufferSize = requestBufferSize;
}
/**
 * Returns the custom request header size, or {@code null} when unset.
 */
public Integer getRequestHeaderSize() {
    return requestHeaderSize;
}
/**
 * Allows to configure a custom value of the request header size on the Jetty connectors.
 */
@Metadata(description = "Allows to configure a custom value of the request header size on the Jetty connectors.")
public void setRequestHeaderSize(Integer requestHeaderSize) {
    this.requestHeaderSize = requestHeaderSize;
}
/**
 * Returns the custom response header size, or {@code null} when unset.
 */
public Integer getResponseHeaderSize() {
    return responseHeaderSize;
}
/**
 * Allows to configure a custom value of the response header size on the Jetty connectors.
 */
@Metadata(description = "Allows to configure a custom value of the response header size on the Jetty connectors.")
public void setResponseHeaderSize(Integer responseHeaderSize) {
    this.responseHeaderSize = responseHeaderSize;
}
/**
 * Returns the http proxy hostname, or {@code null} when unset.
 */
public String getProxyHost() {
    return proxyHost;
}
/**
 * To use a http proxy to configure the hostname.
 */
@Metadata(description = "To use a http proxy to configure the hostname.", label = "proxy")
public void setProxyHost(String proxyHost) {
    this.proxyHost = proxyHost;
}
/**
 * Returns the http proxy port, or {@code null} when unset.
 */
public Integer getProxyPort() {
    return proxyPort;
}
/**
 * To use a http proxy to configure the port number.
 */
@Metadata(description = "To use a http proxy to configure the port number.", label = "proxy")
public void setProxyPort(Integer proxyPort) {
    this.proxyPort = proxyPort;
}
/**
 * Returns whether the X-Forwarded-For header is used for the remote address.
 */
public boolean isUseXForwardedForHeader() {
    return useXForwardedForHeader;
}
/**
 * To use the X-Forwarded-For header in HttpServletRequest.getRemoteAddr.
 */
@Metadata(description = "To use the X-Forwarded-For header in HttpServletRequest.getRemoteAddr.")
public void setUseXForwardedForHeader(boolean useXForwardedForHeader) {
    this.useXForwardedForHeader = useXForwardedForHeader;
}
/**
 * Returns whether the Jetty server version header is sent to clients.
 */
public boolean isSendServerVersion() {
    return sendServerVersion;
}
/**
 * If the option is true, jetty will send the server header with the jetty version information to the client which
 * sends the request. NOTE please make sure there is no any other camel-jetty endpoint is share the same port,
 * otherwise this option may not work as expected.
 */
@Metadata(description = "If the option is true, jetty will send the server header with the jetty version information to the client which sends the request."
                        + " NOTE please make sure there is no any other camel-jetty endpoint is share the same port, otherwise this option may not work as expected.",
          defaultValue = "true", label = "consumer")
public void setSendServerVersion(boolean sendServerVersion) {
    this.sendServerVersion = sendServerVersion;
}
/**
 * Returns the maximum size allowed for uploaded files (-1 means no limit).
 */
public long getMaxFileSize() {
    return maxFileSize;
}
@Metadata(description = "The maximum size allowed for uploaded files. -1 means no limit",
          defaultValue = "-1", label = "consumer,advanced")
public void setMaxFileSize(long maxFileSize) {
    this.maxFileSize = maxFileSize;
}
/**
 * Returns the maximum size allowed for multipart/form-data requests (-1 means no limit).
 */
public long getMaxRequestSize() {
    return maxRequestSize;
}
@Metadata(description = "The maximum size allowed for multipart/form-data requests. -1 means no limit",
          defaultValue = "-1", label = "consumer,advanced")
public void setMaxRequestSize(long maxRequestSize) {
    this.maxRequestSize = maxRequestSize;
}
/**
 * Returns the size threshold after which multipart files are written to disk.
 */
public int getFileSizeThreshold() {
    return fileSizeThreshold;
}
@Metadata(description = "The size threshold after which files will be written to disk for multipart/form-data requests. By default the files are not written to disk",
          defaultValue = "0", label = "consumer,advanced")
public void setFileSizeThreshold(int fileSizeThreshold) {
    this.fileSizeThreshold = fileSizeThreshold;
}
/**
 * Returns the directory where multipart files are stored, or {@code null} for the
 * system temporary folder.
 */
public String getFilesLocation() {
    return filesLocation;
}
@Metadata(description = "The directory location where files will be store for multipart/form-data requests. By default the files are written in the system temporary folder",
          label = "consumer,advanced")
public void setFilesLocation(String filesLocation) {
    this.filesLocation = filesLocation;
}
// Implementation methods
// -------------------------------------------------------------------------
/**
 * Creates a REST consumer for the given verb and path by delegating to
 * {@link #doCreateConsumer} with {@code api=false}.
 */
@Override
public Consumer createConsumer(
        CamelContext camelContext, Processor processor, String verb, String basePath, String uriTemplate,
        String consumes, String produces, RestConfiguration configuration, Map<String, Object> parameters)
        throws Exception {
    return doCreateConsumer(camelContext, processor, verb, basePath, uriTemplate, configuration,
            parameters, false);
}
/**
 * Creates the REST api-doc consumer: GET on the context path, matching on uri
 * prefix ({@code api=true}).
 */
@Override
public Consumer createApiConsumer(
        CamelContext camelContext, Processor processor, String contextPath,
        RestConfiguration configuration, Map<String, Object> parameters)
        throws Exception {
    // reuse the createConsumer method we already have. The api need to use GET and match on uri prefix
    return doCreateConsumer(camelContext, processor, "GET", contextPath, null, configuration, parameters, true);
}
/**
 * Shared factory for REST consumers: assembles the jetty endpoint URL from the
 * rest configuration (scheme, host, port, context-path), resolves the endpoint,
 * installs a REST-aware http binding unless a custom one was configured, and
 * creates and starts the consumer.
 *
 * @param api whether this is the api-doc consumer (GET, prefix matching)
 */
Consumer doCreateConsumer(
        CamelContext camelContext, Processor processor, String verb, String basePath, String uriTemplate,
        RestConfiguration configuration, Map<String, Object> parameters, boolean api)
        throws Exception {
    String path = basePath;
    if (uriTemplate != null) {
        // make sure to avoid double slashes
        if (uriTemplate.startsWith("/")) {
            path = path + uriTemplate;
        } else {
            path = path + "/" + uriTemplate;
        }
    }
    path = FileUtil.stripLeadingSeparator(path);
    String scheme = "http";
    String host = "";
    int port = 0;
    // if no explicit port/host configured, then use port from rest configuration
    RestConfiguration config = configuration;
    if (config == null) {
        config = CamelContextHelper.getRestConfiguration(getCamelContext(), "jetty");
    }
    if (config.getScheme() != null) {
        scheme = config.getScheme();
    }
    if (config.getHost() != null) {
        host = config.getHost();
    }
    int num = config.getPort();
    if (num > 0) {
        port = num;
    }
    // prefix path with context-path if configured in rest-dsl configuration
    String contextPath = config.getContextPath();
    if (ObjectHelper.isNotEmpty(contextPath)) {
        contextPath = FileUtil.stripTrailingSeparator(contextPath);
        contextPath = FileUtil.stripLeadingSeparator(contextPath);
        if (ObjectHelper.isNotEmpty(contextPath)) {
            path = contextPath + "/" + path;
        }
    }
    // if no explicit hostname set then resolve the hostname
    if (ObjectHelper.isEmpty(host)) {
        host = RestComponentHelper.resolveRestHostName(host, config);
    }
    Map<String, Object> map = RestComponentHelper.initRestEndpointProperties("jetty", config);
    boolean cors = config.isEnableCORS();
    if (cors) {
        // allow HTTP Options as we want to handle CORS in rest-dsl
        map.put("optionsEnabled", "true");
    }
    if (api) {
        // api-doc consumer serves everything under the context path
        map.put("matchOnUriPrefix", "true");
    }
    RestComponentHelper.addHttpRestrictParam(map, verb, cors);
    String url = RestComponentHelper.createRestConsumerUrl("jetty", scheme, host, port, path, map);
    JettyHttpEndpoint endpoint = (JettyHttpEndpoint) camelContext.getEndpoint(url, parameters);
    boolean binding = map.containsKey("httpBindingRef") || map.containsKey("httpBinding");
    if (!binding) {
        // use the rest binding, if not using a custom http binding
        endpoint.setHttpBinding(new JettyRestHttpBinding(endpoint));
        // disable this filter as we want to use ours
        endpoint.setEnableMultipartFilter(false);
    }
    // configure consumer properties
    Consumer consumer = endpoint.createConsumer(processor);
    if (config.getConsumerProperties() != null && !config.getConsumerProperties().isEmpty()) {
        setProperties(camelContext, consumer, config.getConsumerProperties());
    }
    // the endpoint must be started before creating the producer
    ServiceHelper.startService(endpoint);
    return consumer;
}
protected CamelServlet createServletForConnector(
Server server, Connector connector,
List<Handler> handlers, JettyHttpEndpoint endpoint)
throws Exception {
ServletContextHandler context
= new ServletContextHandler("/", false, false);
server.setHandler(context);
addJettyHandlers(server, handlers);
CamelServlet camelServlet = new CamelContinuationServlet();
ServletHolder holder = new ServletHolder();
holder.setServlet(camelServlet);
holder.setAsyncSupported(true);
holder.setInitParameter(CamelServlet.ASYNC_PARAM, Boolean.toString(endpoint.isAsync()));
context.addServlet(holder, "/*");
String location = endpoint.getFilesLocation();
if (location == null) {
File file = File.createTempFile("camel", "");
if (!FileUtil.deleteFile(file)) {
LOG.error("failed to delete {}", file);
}
location = file.getParentFile().getAbsolutePath();
}
//must register the MultipartConfig to make jetty server multipart aware
holder.getRegistration()
.setMultipartConfig(new MultipartConfigElement(
location, endpoint.getMaxFileSize(), endpoint.getMaxRequestSize(), endpoint.getFileSizeThreshold()));
// use rest enabled resolver in case we use rest
camelServlet.setServletResolveConsumerStrategy(new HttpRestServletResolveConsumerStrategy());
return camelServlet;
}
protected void addJettyHandlers(Server server, List<Handler> handlers) {
if (handlers != null && !handlers.isEmpty()) {
for (Handler handler : handlers) {
if (handler instanceof Handler.Wrapper) {
// avoid setting a handler more than once
if (!isHandlerInChain(server.getHandler(), handler)) {
((Handler.Wrapper) handler).setHandler(server.getHandler());
server.setHandler(handler);
}
} else {
ContextHandlerCollection handlerCollection = new ContextHandlerCollection();
handlerCollection.addHandler(server.getHandler());
handlerCollection.addHandler(handler);
server.setHandler(handlerCollection);
}
}
}
}
protected boolean isHandlerInChain(Handler current, Handler handler) {
if (handler.equals(current)) {
//Found a match in the chain
return true;
} else if (current instanceof Handler.Wrapper) {
//Inspect the next handler in the chain
return isHandlerInChain(((Handler.Wrapper) current).getHandler(), handler);
} else {
//End of chain
return false;
}
}
protected Server createServer() {
ThreadPool tp = threadPool;
defaultQueuedThreadPool = null;
// configure thread pool if min/max given
if (minThreads != null || maxThreads != null) {
if (getThreadPool() != null) {
throw new IllegalArgumentException(
"You cannot configure both minThreads/maxThreads and a custom threadPool on JettyHttpComponent: "
+ this);
}
defaultQueuedThreadPool = new QueuedThreadPool();
if (minThreads != null) {
defaultQueuedThreadPool.setMinThreads(minThreads.intValue());
}
if (maxThreads != null) {
defaultQueuedThreadPool.setMaxThreads(maxThreads.intValue());
}
tp = defaultQueuedThreadPool;
}
Server s;
if (tp != null) {
s = new Server(tp);
} else {
s = new Server();
}
if (isEnableJmx()) {
enableJmx(s);
}
if (defaultQueuedThreadPool != null) {
// let the thread names indicate they are from the server
defaultQueuedThreadPool.setName("CamelJettyServer(" + ObjectHelper.getIdentityHashCode(s) + ")");
try {
defaultQueuedThreadPool.start();
} catch (Exception e) {
throw new RuntimeCamelException("Error starting JettyServer thread pool: " + defaultQueuedThreadPool, e);
}
}
ContextHandlerCollection collection = new ContextHandlerCollection();
s.setHandler(collection);
// setup the error handler if it set to Jetty component
if (getErrorHandler() != null) {
s.setErrorHandler(getErrorHandler());
} else {
//need an error handler that won't leak information about the exception back to the client.
ErrorHandler eh = new ErrorHandler() {
@Override
public boolean handle(
Request baseRequest, Response response, Callback callback)
throws Exception {
String msg = HttpStatus.getMessage(response.getStatus());
Object timeout = baseRequest.getAttribute(CamelContinuationServlet.TIMEOUT_ERROR);
if (Boolean.TRUE.equals(timeout)) {
baseRequest.setAttribute(RequestDispatcher.ERROR_STATUS_CODE, 504);
response.setStatus(504);
}
baseRequest.setAttribute(RequestDispatcher.ERROR_MESSAGE, msg);
return super.handle(baseRequest, response, callback);
}
};
s.setErrorHandler(eh);
}
return s;
}
@Override
protected void doInit() throws Exception {
super.doInit();
try {
RestConfiguration config = CamelContextHelper.getRestConfiguration(getCamelContext(), "jetty");
// configure additional options on jetty configuration
if (config.getComponentProperties() != null && !config.getComponentProperties().isEmpty()) {
setProperties(this, config.getComponentProperties());
}
} catch (IllegalArgumentException e) {
// if there's a mismatch between the component and the rest-configuration,
// then getRestConfiguration throws IllegalArgumentException which can be
// safely ignored as it means there's no special conf for this component.
}
}
@Override
protected void doStop() throws Exception {
super.doStop();
for (Map.Entry<String, ConnectorRef> connectorEntry : CONNECTORS.entrySet()) {
ConnectorRef connectorRef = connectorEntry.getValue();
if (connectorRef != null && connectorRef.getRefCount() == 0) {
connectorRef.server.removeConnector(connectorRef.connector);
connectorRef.connector.stop();
// Camel controls the lifecycle of these entities so remove the
// registered MBeans when Camel is done with the managed objects.
removeServerMBean(connectorRef.server);
connectorRef.server.stop();
//removeServerMBean(connectorRef.connector);
CONNECTORS.remove(connectorEntry.getKey());
}
}
if (mbContainer != null) {
mbContainer.destroy();
mbContainer = null;
}
}
private void addServerMBean(Server server) {
if (mbContainer == null) {
return;
}
try {
Object o = getContainer(server);
o.getClass().getMethod("addEventListener", EventListener.class).invoke(o, mbContainer);
mbContainer.getClass().getMethod("beanAdded", Container.class, Object.class)
.invoke(mbContainer, null, server);
} catch (RuntimeException rex) {
throw rex;
} catch (Exception r) {
throw new RuntimeException(r);
}
}
private void removeServerMBean(Server server) {
try {
mbContainer.getClass().getMethod("beanRemoved", Container.class, Object.class)
.invoke(mbContainer, null, server);
} catch (RuntimeException rex) {
throw rex;
} catch (Exception r) {
try {
mbContainer.getClass().getMethod("removeBean", Object.class)
.invoke(mbContainer, server);
} catch (RuntimeException rex) {
throw rex;
} catch (Exception r2) {
throw new RuntimeException(r);
}
}
}
private static Container getContainer(Object server) {
if (server instanceof Container) {
return (Container) server;
}
try {
return (Container) server.getClass().getMethod("getContainer").invoke(server);
} catch (RuntimeException t) {
throw t;
} catch (Exception t) {
throw new RuntimeException(t);
}
}
}
| ConnectorRef |
java | apache__camel | components/camel-mapstruct/src/main/java/org/apache/camel/component/mapstruct/MapstructEndpoint.java | {
"start": 1492,
"end": 2340
} | class ____ extends DefaultEndpoint {
@UriPath
@Metadata(required = true)
private String className;
private transient Class<?> clazz;
@UriParam(defaultValue = "true")
private boolean mandatory = true;
public MapstructEndpoint(String endpointUri, Component component) {
super(endpointUri, component);
}
@Override
public boolean isRemote() {
return false;
}
@Override
public Producer createProducer() throws Exception {
return new MapstructProducer(this, clazz, mandatory);
}
@Override
public Consumer createConsumer(Processor processor) throws Exception {
throw new UnsupportedOperationException("Consumer is not supported");
}
public String getClassName() {
return className;
}
/**
* The fully qualified | MapstructEndpoint |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_1648/Issue1648Test.java | {
"start": 528,
"end": 844
} | class ____ {
@ProcessorTest
public void shouldCorrectlyMarkSourceAsUsed() {
Source source = new Source();
source.setSourceValue( "value" );
Target target = Issue1648Mapper.INSTANCE.map( source );
assertThat( target.getTargetValue() ).isEqualTo( "value" );
}
}
| Issue1648Test |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/factory/DefaultListableBeanFactoryTests.java | {
"start": 144951,
"end": 145005
} | interface ____ {
}
@Priority(1)
static | PriorityService |
java | spring-projects__spring-security | config/src/main/java/org/springframework/security/config/annotation/web/configurers/oauth2/server/authorization/OAuth2ClientRegistrationEndpointConfigurer.java | {
"start": 2955,
"end": 13406
} | class ____ extends AbstractOAuth2Configurer {
private RequestMatcher requestMatcher;
private final List<AuthenticationConverter> clientRegistrationRequestConverters = new ArrayList<>();
private Consumer<List<AuthenticationConverter>> clientRegistrationRequestConvertersConsumer = (
clientRegistrationRequestConverters) -> {
};
private final List<AuthenticationProvider> authenticationProviders = new ArrayList<>();
private Consumer<List<AuthenticationProvider>> authenticationProvidersConsumer = (authenticationProviders) -> {
};
private AuthenticationSuccessHandler clientRegistrationResponseHandler;
private AuthenticationFailureHandler errorResponseHandler;
private boolean openRegistrationAllowed;
/**
* Restrict for internal use only.
* @param objectPostProcessor an {@code ObjectPostProcessor}
*/
OAuth2ClientRegistrationEndpointConfigurer(ObjectPostProcessor<Object> objectPostProcessor) {
super(objectPostProcessor);
}
/**
* Adds an {@link AuthenticationConverter} used when attempting to extract a Client
* Registration Request from {@link HttpServletRequest} to an instance of
* {@link OAuth2ClientRegistrationAuthenticationToken} used for authenticating the
* request.
* @param clientRegistrationRequestConverter an {@link AuthenticationConverter} used
* when attempting to extract a Client Registration Request from
* {@link HttpServletRequest}
* @return the {@link OAuth2ClientRegistrationEndpointConfigurer} for further
* configuration
*/
public OAuth2ClientRegistrationEndpointConfigurer clientRegistrationRequestConverter(
AuthenticationConverter clientRegistrationRequestConverter) {
Assert.notNull(clientRegistrationRequestConverter, "clientRegistrationRequestConverter cannot be null");
this.clientRegistrationRequestConverters.add(clientRegistrationRequestConverter);
return this;
}
/**
* Sets the {@code Consumer} providing access to the {@code List} of default and
* (optionally) added
* {@link #clientRegistrationRequestConverter(AuthenticationConverter)
* AuthenticationConverter}'s allowing the ability to add, remove, or customize a
* specific {@link AuthenticationConverter}.
* @param clientRegistrationRequestConvertersConsumer the {@code Consumer} providing
* access to the {@code List} of default and (optionally) added
* {@link AuthenticationConverter}'s
* @return the {@link OAuth2ClientRegistrationEndpointConfigurer} for further
* configuration
*/
public OAuth2ClientRegistrationEndpointConfigurer clientRegistrationRequestConverters(
Consumer<List<AuthenticationConverter>> clientRegistrationRequestConvertersConsumer) {
Assert.notNull(clientRegistrationRequestConvertersConsumer,
"clientRegistrationRequestConvertersConsumer cannot be null");
this.clientRegistrationRequestConvertersConsumer = clientRegistrationRequestConvertersConsumer;
return this;
}
/**
* Adds an {@link AuthenticationProvider} used for authenticating an
* {@link OAuth2ClientRegistrationAuthenticationToken}.
* @param authenticationProvider an {@link AuthenticationProvider} used for
* authenticating an {@link OAuth2ClientRegistrationAuthenticationToken}
* @return the {@link OAuth2ClientRegistrationEndpointConfigurer} for further
* configuration
*/
public OAuth2ClientRegistrationEndpointConfigurer authenticationProvider(
AuthenticationProvider authenticationProvider) {
Assert.notNull(authenticationProvider, "authenticationProvider cannot be null");
this.authenticationProviders.add(authenticationProvider);
return this;
}
/**
* Sets the {@code Consumer} providing access to the {@code List} of default and
* (optionally) added {@link #authenticationProvider(AuthenticationProvider)
* AuthenticationProvider}'s allowing the ability to add, remove, or customize a
* specific {@link AuthenticationProvider}.
* @param authenticationProvidersConsumer the {@code Consumer} providing access to the
* {@code List} of default and (optionally) added {@link AuthenticationProvider}'s
* @return the {@link OAuth2ClientRegistrationEndpointConfigurer} for further
* configuration
*/
public OAuth2ClientRegistrationEndpointConfigurer authenticationProviders(
Consumer<List<AuthenticationProvider>> authenticationProvidersConsumer) {
Assert.notNull(authenticationProvidersConsumer, "authenticationProvidersConsumer cannot be null");
this.authenticationProvidersConsumer = authenticationProvidersConsumer;
return this;
}
/**
* Sets the {@link AuthenticationSuccessHandler} used for handling an
* {@link OAuth2ClientRegistrationAuthenticationToken} and returning the
* {@link OAuth2ClientRegistration Client Registration Response}.
* @param clientRegistrationResponseHandler the {@link AuthenticationSuccessHandler}
* used for handling an {@link OAuth2ClientRegistrationAuthenticationToken}
* @return the {@link OAuth2ClientRegistrationEndpointConfigurer} for further
* configuration
*/
public OAuth2ClientRegistrationEndpointConfigurer clientRegistrationResponseHandler(
AuthenticationSuccessHandler clientRegistrationResponseHandler) {
this.clientRegistrationResponseHandler = clientRegistrationResponseHandler;
return this;
}
/**
* Sets the {@link AuthenticationFailureHandler} used for handling an
* {@link OAuth2AuthenticationException} and returning the {@link OAuth2Error Error
* Response}.
* @param errorResponseHandler the {@link AuthenticationFailureHandler} used for
* handling an {@link OAuth2AuthenticationException}
* @return the {@link OAuth2ClientRegistrationEndpointConfigurer} for further
* configuration
*/
public OAuth2ClientRegistrationEndpointConfigurer errorResponseHandler(
AuthenticationFailureHandler errorResponseHandler) {
this.errorResponseHandler = errorResponseHandler;
return this;
}
/**
* Set to {@code true} if open client registration (with no initial access token) is
* allowed. The default is {@code false}.
* @param openRegistrationAllowed {@code true} if open client registration is allowed,
* {@code false} otherwise
* @return the {@link OAuth2ClientRegistrationEndpointConfigurer} for further
* configuration
*/
public OAuth2ClientRegistrationEndpointConfigurer openRegistrationAllowed(boolean openRegistrationAllowed) {
this.openRegistrationAllowed = openRegistrationAllowed;
return this;
}
@Override
void init(HttpSecurity httpSecurity) {
AuthorizationServerSettings authorizationServerSettings = OAuth2ConfigurerUtils
.getAuthorizationServerSettings(httpSecurity);
String clientRegistrationEndpointUri = authorizationServerSettings.isMultipleIssuersAllowed()
? OAuth2ConfigurerUtils
.withMultipleIssuersPattern(authorizationServerSettings.getClientRegistrationEndpoint())
: authorizationServerSettings.getClientRegistrationEndpoint();
this.requestMatcher = PathPatternRequestMatcher.withDefaults()
.matcher(HttpMethod.POST, clientRegistrationEndpointUri);
List<AuthenticationProvider> authenticationProviders = createDefaultAuthenticationProviders(httpSecurity,
this.openRegistrationAllowed);
if (!this.authenticationProviders.isEmpty()) {
authenticationProviders.addAll(0, this.authenticationProviders);
}
this.authenticationProvidersConsumer.accept(authenticationProviders);
authenticationProviders.forEach(
(authenticationProvider) -> httpSecurity.authenticationProvider(postProcess(authenticationProvider)));
}
@Override
void configure(HttpSecurity httpSecurity) {
AuthenticationManager authenticationManager = httpSecurity.getSharedObject(AuthenticationManager.class);
AuthorizationServerSettings authorizationServerSettings = OAuth2ConfigurerUtils
.getAuthorizationServerSettings(httpSecurity);
String clientRegistrationEndpointUri = authorizationServerSettings.isMultipleIssuersAllowed()
? OAuth2ConfigurerUtils
.withMultipleIssuersPattern(authorizationServerSettings.getClientRegistrationEndpoint())
: authorizationServerSettings.getClientRegistrationEndpoint();
OAuth2ClientRegistrationEndpointFilter clientRegistrationEndpointFilter = new OAuth2ClientRegistrationEndpointFilter(
authenticationManager, clientRegistrationEndpointUri);
List<AuthenticationConverter> authenticationConverters = createDefaultAuthenticationConverters();
if (!this.clientRegistrationRequestConverters.isEmpty()) {
authenticationConverters.addAll(0, this.clientRegistrationRequestConverters);
}
this.clientRegistrationRequestConvertersConsumer.accept(authenticationConverters);
clientRegistrationEndpointFilter
.setAuthenticationConverter(new DelegatingAuthenticationConverter(authenticationConverters));
if (this.clientRegistrationResponseHandler != null) {
clientRegistrationEndpointFilter.setAuthenticationSuccessHandler(this.clientRegistrationResponseHandler);
}
if (this.errorResponseHandler != null) {
clientRegistrationEndpointFilter.setAuthenticationFailureHandler(this.errorResponseHandler);
}
httpSecurity.addFilterAfter(postProcess(clientRegistrationEndpointFilter), AuthorizationFilter.class);
}
@Override
RequestMatcher getRequestMatcher() {
return this.requestMatcher;
}
private static List<AuthenticationConverter> createDefaultAuthenticationConverters() {
List<AuthenticationConverter> authenticationConverters = new ArrayList<>();
authenticationConverters.add(new OAuth2ClientRegistrationAuthenticationConverter());
return authenticationConverters;
}
private static List<AuthenticationProvider> createDefaultAuthenticationProviders(HttpSecurity httpSecurity,
boolean openRegistrationAllowed) {
List<AuthenticationProvider> authenticationProviders = new ArrayList<>();
OAuth2ClientRegistrationAuthenticationProvider clientRegistrationAuthenticationProvider = new OAuth2ClientRegistrationAuthenticationProvider(
OAuth2ConfigurerUtils.getRegisteredClientRepository(httpSecurity),
OAuth2ConfigurerUtils.getAuthorizationService(httpSecurity));
PasswordEncoder passwordEncoder = OAuth2ConfigurerUtils.getOptionalBean(httpSecurity, PasswordEncoder.class);
if (passwordEncoder != null) {
clientRegistrationAuthenticationProvider.setPasswordEncoder(passwordEncoder);
}
clientRegistrationAuthenticationProvider.setOpenRegistrationAllowed(openRegistrationAllowed);
authenticationProviders.add(clientRegistrationAuthenticationProvider);
return authenticationProviders;
}
}
| OAuth2ClientRegistrationEndpointConfigurer |
java | elastic__elasticsearch | build-tools/src/main/java/org/elasticsearch/gradle/testclusters/MockApmServer.java | {
"start": 4269,
"end": 8966
} | class ____ implements HttpHandler {
// checked by APM agent to identify the APM server version to adjust its behavior accordingly
private static final String FAKE_VERSION = """
{
"build_date": "2021-12-18T19:59:06Z",
"build_sha": "24fe620eeff5a19e2133c940c7e5ce1ceddb1445",
"publish_ready": true,
"version": "9.0.0"
}
""";
public void handle(HttpExchange t) {
try {
if ("GET".equals(t.getRequestMethod()) && "/".equals(t.getRequestURI().getPath())) {
t.sendResponseHeaders(200, FAKE_VERSION.length());
try (OutputStream os = t.getResponseBody()) {
os.write(FAKE_VERSION.getBytes());
}
return;
}
InputStream body = t.getRequestBody();
if (metricFilter == null && transactionFilter == null) {
logRequestBody(body);
} else {
logFiltered(body);
}
String response = "{}";
t.sendResponseHeaders(200, response.length());
try (OutputStream os = t.getResponseBody()) {
os.write(response.getBytes());
}
} catch (Exception e) {
e.printStackTrace();
}
}
private void logRequestBody(InputStream body) throws IOException {
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
IOUtils.copy(body, bytes);
logger.lifecycle(("MockApmServer reading JSON objects: " + bytes.toString()));
}
private void logFiltered(InputStream body) throws IOException {
ObjectMapper mapper = new ObjectMapper();
try (BufferedReader reader = new BufferedReader(new InputStreamReader(body))) {
String line;
String nodeMetadata = null;
List<JsonNode> spans = new ArrayList<>();
while ((line = reader.readLine()) != null) {
var jsonNode = mapper.readTree(line);
if (jsonNode.has("metadata")) {
nodeMetadata = jsonNode.path("metadata").path("service").path("node").path("configured_name").asText(null);
var tier = jsonNode.path("metadata").path("labels").path("node_tier").asText(null);
nodeMetadata += tier != null ? "/" + tier : "";
} else if (transactionFilter != null && jsonNode.has("transaction")) {
var transaction = jsonNode.get("transaction");
var name = transaction.get("name").asText();
if (transactionFilter.matcher(name).matches()
&& (transactionExcludesFilter == null || transactionExcludesFilter.matcher(name).matches() == false)) {
transactionCache.put(transaction.get("id").asText(), name);
logger.lifecycle("Transaction {} [{}]: {}", name, nodeMetadata, transaction);
}
} else if (jsonNode.has("span")) {
spans.add(jsonNode.get("span")); // make sure to record all transactions first
} else if (metricFilter != null && jsonNode.has("metricset")) {
var metricset = jsonNode.get("metricset");
var samples = (ObjectNode) metricset.get("samples");
for (var name : Streams.of(samples.fieldNames()).toList()) {
if (metricFilter.matcher(name).matches() == false) {
samples.remove(name);
}
}
if (samples.isEmpty() == false) {
logger.lifecycle("Metricset [{}]: {}", nodeMetadata, metricset);
}
}
}
// emit only spans for previously matched transactions using the transaction cache
for (var span : spans) {
var name = span.get("name").asText();
var transactionId = span.get("transaction_id").asText();
var transactionName = transactionCache.get(transactionId);
if (transactionName != null) {
logger.lifecycle("Span {} of {} [{}]: {}", name, transactionName, nodeMetadata, span);
}
}
}
}
}
}
| RootHandler |
java | apache__kafka | storage/src/main/java/org/apache/kafka/server/log/remote/metadata/storage/RemotePartitionMetadataStore.java | {
"start": 1986,
"end": 9095
} | class ____ extends RemotePartitionMetadataEventHandler implements Closeable {
private static final Logger log = LoggerFactory.getLogger(RemotePartitionMetadataStore.class);
private Map<TopicIdPartition, RemotePartitionDeleteMetadata> idToPartitionDeleteMetadata =
new ConcurrentHashMap<>();
private Map<TopicIdPartition, RemoteLogMetadataCache> idToRemoteLogMetadataCache =
new ConcurrentHashMap<>();
public RemotePartitionMetadataStore() {
}
@Override
public void handleRemoteLogSegmentMetadata(RemoteLogSegmentMetadata remoteLogSegmentMetadata) {
log.debug("Adding remote log segment: {}", remoteLogSegmentMetadata);
final RemoteLogSegmentId remoteLogSegmentId = remoteLogSegmentMetadata.remoteLogSegmentId();
TopicIdPartition topicIdPartition = remoteLogSegmentId.topicIdPartition();
// This should have been already existing as it is loaded when the partitions are assigned.
RemoteLogMetadataCache remoteLogMetadataCache = idToRemoteLogMetadataCache.get(topicIdPartition);
if (remoteLogMetadataCache != null) {
remoteLogMetadataCache.addCopyInProgressSegment(remoteLogSegmentMetadata);
} else {
throw new IllegalStateException("No partition metadata found for : " + topicIdPartition);
}
}
@Override
public void handleRemoteLogSegmentMetadataUpdate(RemoteLogSegmentMetadataUpdate rlsmUpdate) {
log.debug("Updating remote log segment: {}", rlsmUpdate);
RemoteLogSegmentId remoteLogSegmentId = rlsmUpdate.remoteLogSegmentId();
TopicIdPartition topicIdPartition = remoteLogSegmentId.topicIdPartition();
RemoteLogMetadataCache remoteLogMetadataCache = idToRemoteLogMetadataCache.get(topicIdPartition);
if (remoteLogMetadataCache != null) {
try {
remoteLogMetadataCache.updateRemoteLogSegmentMetadata(rlsmUpdate);
} catch (RemoteResourceNotFoundException e) {
log.warn("Error occurred while updating the remote log segment.", e);
}
} else {
throw new IllegalStateException("No partition metadata found for : " + topicIdPartition);
}
}
@Override
public void handleRemotePartitionDeleteMetadata(RemotePartitionDeleteMetadata remotePartitionDeleteMetadata) {
log.debug("Received partition delete state with: {}", remotePartitionDeleteMetadata);
TopicIdPartition topicIdPartition = remotePartitionDeleteMetadata.topicIdPartition();
idToPartitionDeleteMetadata.put(topicIdPartition, remotePartitionDeleteMetadata);
// there will be a trigger to receive delete partition marker and act on that to delete all the segments.
if (remotePartitionDeleteMetadata.state() == RemotePartitionDeleteState.DELETE_PARTITION_FINISHED) {
// remove the association for the partition.
idToRemoteLogMetadataCache.remove(topicIdPartition);
idToPartitionDeleteMetadata.remove(topicIdPartition);
}
}
@Override
public void clearTopicPartition(TopicIdPartition topicIdPartition) {
idToRemoteLogMetadataCache.remove(topicIdPartition);
}
Iterator<RemoteLogSegmentMetadata> listRemoteLogSegments(TopicIdPartition topicIdPartition)
throws RemoteStorageException {
return getRemoteLogMetadataCache(topicIdPartition).listAllRemoteLogSegments();
}
Iterator<RemoteLogSegmentMetadata> listRemoteLogSegments(TopicIdPartition topicIdPartition, int leaderEpoch)
throws RemoteStorageException {
return getRemoteLogMetadataCache(topicIdPartition).listRemoteLogSegments(leaderEpoch);
}
private RemoteLogMetadataCache getRemoteLogMetadataCache(TopicIdPartition topicIdPartition)
throws RemoteResourceNotFoundException {
Objects.requireNonNull(topicIdPartition, "topicIdPartition can not be null");
RemoteLogMetadataCache remoteLogMetadataCache = idToRemoteLogMetadataCache.get(topicIdPartition);
if (remoteLogMetadataCache == null) {
throw new RemoteResourceNotFoundException("No resource found for partition: " + topicIdPartition);
}
if (!remoteLogMetadataCache.isInitialized()) {
try {
boolean initialized = remoteLogMetadataCache.awaitInitialized(100, TimeUnit.MILLISECONDS);
if (!initialized) {
// Throwing a retriable ReplicaNotAvailableException here for clients retry.
throw new ReplicaNotAvailableException("Remote log metadata cache is not initialized for partition: " + topicIdPartition);
}
} catch (InterruptedException ex) {
throw new RemoteResourceNotFoundException("Couldn't initialize remote log metadata cache for partition: " + topicIdPartition);
}
}
return remoteLogMetadataCache;
}
Optional<RemoteLogSegmentMetadata> remoteLogSegmentMetadata(TopicIdPartition topicIdPartition,
long offset,
int epochForOffset)
throws RemoteStorageException {
return getRemoteLogMetadataCache(topicIdPartition).remoteLogSegmentMetadata(epochForOffset, offset);
}
Optional<RemoteLogSegmentMetadata> nextSegmentWithTxnIndex(TopicIdPartition topicIdPartition,
int epoch,
long offset) throws RemoteStorageException {
return getRemoteLogMetadataCache(topicIdPartition).nextSegmentWithTxnIndex(epoch, offset);
}
Optional<Long> highestLogOffset(TopicIdPartition topicIdPartition,
int leaderEpoch) throws RemoteStorageException {
return getRemoteLogMetadataCache(topicIdPartition).highestOffsetForEpoch(leaderEpoch);
}
@Override
public void close() throws IOException {
log.info("Clearing the entries from the store.");
// Clear the entries by creating unmodifiable empty maps.
// Practically, we do not use the same instances that are closed.
idToPartitionDeleteMetadata = Map.of();
idToRemoteLogMetadataCache = Map.of();
}
@Override
public void maybeLoadPartition(TopicIdPartition partition) {
idToRemoteLogMetadataCache.computeIfAbsent(partition, idPartition -> new RemoteLogMetadataCache());
}
@Override
public void markInitialized(TopicIdPartition partition) {
idToRemoteLogMetadataCache.get(partition).markInitialized();
log.trace("Remote log components are initialized for user-partition: {}", partition);
}
@Override
public boolean isInitialized(TopicIdPartition topicIdPartition) {
RemoteLogMetadataCache metadataCache = idToRemoteLogMetadataCache.get(topicIdPartition);
return metadataCache != null && metadataCache.isInitialized();
}
}
| RemotePartitionMetadataStore |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/requests/EndTxnRequest.java | {
"start": 1298,
"end": 3178
} | class ____ extends AbstractRequest.Builder<EndTxnRequest> {
public final EndTxnRequestData data;
public final boolean isTransactionV2Enabled;
public Builder(EndTxnRequestData data, boolean isTransactionV2Enabled) {
this(data, false, isTransactionV2Enabled);
}
public Builder(EndTxnRequestData data, boolean enableUnstableLastVersion, boolean isTransactionV2Enabled) {
super(ApiKeys.END_TXN, enableUnstableLastVersion);
this.data = data;
this.isTransactionV2Enabled = isTransactionV2Enabled;
}
@Override
public EndTxnRequest build(short version) {
if (!isTransactionV2Enabled) {
version = (short) Math.min(version, LAST_STABLE_VERSION_BEFORE_TRANSACTION_V2);
}
return new EndTxnRequest(data, version);
}
@Override
public String toString() {
return data.toString();
}
}
private EndTxnRequest(EndTxnRequestData data, short version) {
super(ApiKeys.END_TXN, version);
this.data = data;
}
public TransactionResult result() {
if (data.committed())
return TransactionResult.COMMIT;
else
return TransactionResult.ABORT;
}
@Override
public EndTxnRequestData data() {
return data;
}
@Override
public EndTxnResponse getErrorResponse(int throttleTimeMs, Throwable e) {
return new EndTxnResponse(new EndTxnResponseData()
.setErrorCode(Errors.forException(e).code())
.setThrottleTimeMs(throttleTimeMs)
);
}
public static EndTxnRequest parse(Readable readable, short version) {
return new EndTxnRequest(new EndTxnRequestData(readable, version), version);
}
}
| Builder |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/collection/internal/CollectionLogger.java | {
"start": 999,
"end": 2946
} | interface ____ extends BasicLogger {
String NAME = SubSystemLogging.BASE + ".collection";
CollectionLogger COLLECTION_LOGGER = Logger.getMessageLogger( MethodHandles.lookup(), CollectionLogger.class, NAME );
@LogMessage(level = WARN)
@Message(id = 90030001, value = "Unable to close temporary session used to load lazy collection associated to no session")
void unableToCloseTemporarySession();
@LogMessage(level = WARN)
@Message(id = 90030002, value = "Detaching an uninitialized collection with enabled filters from a session: %s")
void enabledFiltersWhenDetachFromSession(String collectionInfoString);
@LogMessage(level = WARN)
@Message(id = 90030004, value = "Attaching an uninitialized collection with queued operations to a session: %s")
void queuedOperationWhenAttachToSession(String collectionInfoString);
@LogMessage(level = INFO)
@Message(id = 90030005, value = "Detaching an uninitialized collection with queued operations from a session: %s")
void queuedOperationWhenDetachFromSession(String collectionInfoString);
@LogMessage(level = DEBUG)
@Message(id = 90030006, value = "Detaching an uninitialized collection with queued operations from a session due to rollback: %s")
void queuedOperationWhenDetachFromSessionOnRollback(String collectionInfoString);
@LogMessage(level = WARN)
@Message(id = 90030007, value = "Cannot unset session in a collection because an unexpected session is defined."
+ " A persistent collection may only be associated with one session at a time. %s")
void logCannotUnsetUnexpectedSessionInCollection(String msg);
@LogMessage(level = WARN)
@Message(id = 90030008, value = "An unexpected session is defined for a collection, but the collection is not connected to that session."
+ " A persistent collection may only be associated with one session at a time. Overwriting session. %s")
void logUnexpectedSessionInCollectionNotConnected(String msg);
}
| CollectionLogger |
java | quarkusio__quarkus | extensions/redis-client/runtime/src/main/java/io/quarkus/redis/datasource/timeseries/Filter.java | {
"start": 273,
"end": 3496
} | interface ____ {
String toString();
/**
* Creates a {@code label=value}, selecting samples where the label equals value.
*
* @param label the label, must not be {@code null}
* @param value the value, must not be {@code null}
* @return the filter
*/
static Filter withLabel(String label, Object value) {
nonNull(label, "label");
nonNull(value, "value");
return new Filter() {
@Override
public String toString() {
return label + "=" + value;
}
};
}
/**
* Creates a {@code label!=value}, selecting samples where the label is not equal to value.
*
* @param label the label, must not be {@code null}
* @param value the value, must not be {@code null}
* @return the filter
*/
static Filter withoutLabel(String label, String value) {
nonNull(label, "label");
nonNull(value, "value");
return new Filter() {
@Override
public String toString() {
return label + "!=" + value;
}
};
}
/**
* Creates a {@code label=}, selecting samples containing the given label.
*
* @param label the label, must not be {@code null}
* @return the filter
*/
static Filter withLabel(String label) {
nonNull(label, "label");
return new Filter() {
@Override
public String toString() {
return label + "=";
}
};
}
/**
* Creates a {@code label!=}, selecting samples that do not have the given label.
*
* @param label the label, must not be {@code null}
* @return the filter
*/
static Filter withoutLabel(String label) {
nonNull(label, "label");
return new Filter() {
@Override
public String toString() {
return label + "!=";
}
};
}
/**
* Creates a {@code label=(value1,value2,...)}, selecting samples with the given label equals one of the values
* in the list
*
* @param label the label, must not be {@code null}
* @return the filter
*/
static Filter withLabelHavingValueFrom(String label, String... values) {
nonNull(label, "label");
doesNotContainNull(values, "values");
return new Filter() {
@Override
public String toString() {
return label + "=(" + String.join(",", values) + ")";
}
};
}
/**
* Creates a {@code label!=(value1,value2,...)}, selecting samples with the given label with a value not equal to
* any of the values in the list.
*
* @param label the label, must not be {@code null}
* @param values the values
* @return the filter
*/
static Filter withLabelNotHavingValueFrom(String label, String... values) {
nonNull(label, "label");
doesNotContainNull(values, "values");
return new Filter() {
@Override
public String toString() {
return label + "!=(" + String.join(",", values) + ")";
}
};
}
}
| Filter |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/FluxRepeat.java | {
"start": 2134,
"end": 3425
} | class ____<T>
extends Operators.MultiSubscriptionSubscriber<T, T> {
final CorePublisher<? extends T> source;
long remaining;
volatile int wip;
@SuppressWarnings("rawtypes")
static final AtomicIntegerFieldUpdater<RepeatSubscriber> WIP =
AtomicIntegerFieldUpdater.newUpdater(RepeatSubscriber.class, "wip");
long produced;
RepeatSubscriber(CorePublisher<? extends T> source, CoreSubscriber<? super T> actual, long remaining) {
super(actual);
this.source = source;
this.remaining = remaining;
}
@Override
public void onNext(T t) {
produced++;
actual.onNext(t);
}
@Override
public void onComplete() {
long r = remaining;
if (r != Long.MAX_VALUE) {
if (r == 0) {
actual.onComplete();
return;
}
remaining = r - 1;
}
resubscribe();
}
void resubscribe() {
if (WIP.getAndIncrement(this) == 0) {
do {
if (isCancelled()) {
return;
}
long c = produced;
if (c != 0L) {
produced = 0L;
produced(c);
}
source.subscribe(this);
} while (WIP.decrementAndGet(this) != 0);
}
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
return super.scanUnsafe(key);
}
}
}
| RepeatSubscriber |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/lock/LockInterceptorDeadlockTest.java | {
"start": 969,
"end": 1386
} | class ____ {
@Lock(Type.READ)
boolean read() {
return write();
}
@Lock(Type.READ)
boolean nestedRead() {
return read();
}
@Lock(Type.WRITE)
boolean write() {
return true;
}
@Lock(Type.WRITE)
boolean nestedWrite() {
return nestedRead();
}
}
}
| SimpleApplicationScopedBean |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/mappedsuperclass/intermediate/AccountBase.java | {
"start": 404,
"end": 495
} | class ____ the hierarchy.
*
* @author Saša Obradović
*/
@MappedSuperclass
public abstract | in |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/async/AsyncLoggerContextSelector.java | {
"start": 1299,
"end": 2433
} | class ____ extends ClassLoaderContextSelector {
/**
* Returns {@code true} if the user specified this selector as the Log4jContextSelector, to make all loggers
* asynchronous.
*
* @return {@code true} if all loggers are asynchronous, {@code false} otherwise.
*/
public static boolean isSelected() {
return AsyncLoggerContextSelector.class
.getName()
.equals(PropertiesUtil.getProperties().getStringProperty(Constants.LOG4J_CONTEXT_SELECTOR));
}
@Override
protected LoggerContext createContext(final String name, final URI configLocation) {
return new AsyncLoggerContext(name, null, configLocation);
}
@Override
protected String toContextMapKey(final ClassLoader loader) {
// LOG4J2-666 ensure unique name across separate instances created by webapp classloaders
return "AsyncContext@" + Integer.toHexString(System.identityHashCode(loader));
}
@Override
protected String defaultContextName() {
return "DefaultAsyncContext@" + Thread.currentThread().getName();
}
}
| AsyncLoggerContextSelector |
java | apache__flink | flink-core/src/test/java/org/apache/flink/core/io/LocatableSplitAssignerTest.java | {
"start": 1156,
"end": 17482
} | class ____ {
@Test
void testSerialSplitAssignmentWithNullHost() {
final int NUM_SPLITS = 50;
final String[][] hosts = new String[][] {new String[] {"localhost"}, new String[0], null};
// load some splits
Set<LocatableInputSplit> splits = new HashSet<>();
for (int i = 0; i < NUM_SPLITS; i++) {
splits.add(new LocatableInputSplit(i, hosts[i % 3]));
}
// get all available splits
LocatableInputSplitAssigner ia = new LocatableInputSplitAssigner(splits);
InputSplit is = null;
while ((is = ia.getNextInputSplit(null, 0)) != null) {
assertThat(splits.remove(is)).isTrue();
}
// check we had all
assertThat(splits).isEmpty();
assertThat(ia.getNextInputSplit("", 0)).isNull();
assertThat(ia.getNumberOfRemoteAssignments()).isEqualTo(NUM_SPLITS);
assertThat(ia.getNumberOfLocalAssignments()).isZero();
}
@Test
void testSerialSplitAssignmentAllForSameHost() {
final int NUM_SPLITS = 50;
// load some splits
Set<LocatableInputSplit> splits = new HashSet<>();
for (int i = 0; i < NUM_SPLITS; i++) {
splits.add(new LocatableInputSplit(i, "testhost"));
}
// get all available splits
LocatableInputSplitAssigner ia = new LocatableInputSplitAssigner(splits);
InputSplit is = null;
while ((is = ia.getNextInputSplit("testhost", 0)) != null) {
assertThat(splits.remove(is)).isTrue();
}
// check we had all
assertThat(splits).isEmpty();
assertThat(ia.getNextInputSplit("", 0)).isNull();
assertThat(ia.getNumberOfRemoteAssignments()).isZero();
assertThat(ia.getNumberOfLocalAssignments()).isEqualTo(NUM_SPLITS);
}
@Test
void testSerialSplitAssignmentAllForRemoteHost() {
final String[] hosts = {"host1", "host1", "host1", "host2", "host2", "host3"};
final int NUM_SPLITS = 10 * hosts.length;
// load some splits
Set<LocatableInputSplit> splits = new HashSet<>();
for (int i = 0; i < NUM_SPLITS; i++) {
splits.add(new LocatableInputSplit(i, hosts[i % hosts.length]));
}
// get all available splits
LocatableInputSplitAssigner ia = new LocatableInputSplitAssigner(splits);
InputSplit is = null;
while ((is = ia.getNextInputSplit("testhost", 0)) != null) {
assertThat(splits.remove(is)).isTrue();
}
// check we had all
assertThat(splits).isEmpty();
assertThat(ia.getNextInputSplit("anotherHost", 0)).isNull();
assertThat(ia.getNumberOfRemoteAssignments()).isEqualTo(NUM_SPLITS);
assertThat(ia.getNumberOfLocalAssignments()).isZero();
}
@Test
void testSerialSplitAssignmentSomeForRemoteHost() {
// host1 reads all local
// host2 reads 10 local and 10 remote
// host3 reads all remote
final String[] hosts = {"host1", "host2", "host3"};
final int NUM_LOCAL_HOST1_SPLITS = 20;
final int NUM_LOCAL_HOST2_SPLITS = 10;
final int NUM_REMOTE_SPLITS = 30;
final int NUM_LOCAL_SPLITS = NUM_LOCAL_HOST1_SPLITS + NUM_LOCAL_HOST2_SPLITS;
// load local splits
int splitCnt = 0;
Set<LocatableInputSplit> splits = new HashSet<>();
// host1 splits
for (int i = 0; i < NUM_LOCAL_HOST1_SPLITS; i++) {
splits.add(new LocatableInputSplit(splitCnt++, "host1"));
}
// host2 splits
for (int i = 0; i < NUM_LOCAL_HOST2_SPLITS; i++) {
splits.add(new LocatableInputSplit(splitCnt++, "host2"));
}
// load remote splits
for (int i = 0; i < NUM_REMOTE_SPLITS; i++) {
splits.add(new LocatableInputSplit(splitCnt++, "remoteHost"));
}
// get all available splits
LocatableInputSplitAssigner ia = new LocatableInputSplitAssigner(splits);
InputSplit is = null;
int i = 0;
while ((is = ia.getNextInputSplit(hosts[i++ % hosts.length], 0)) != null) {
assertThat(splits.remove(is)).isTrue();
}
// check we had all
assertThat(splits).isEmpty();
assertThat(ia.getNextInputSplit("anotherHost", 0)).isNull();
assertThat(ia.getNumberOfRemoteAssignments()).isEqualTo(NUM_REMOTE_SPLITS);
assertThat(ia.getNumberOfLocalAssignments()).isEqualTo(NUM_LOCAL_SPLITS);
}
@Test
void testSerialSplitAssignmentMultiLocalHost() {
final String[] localHosts = {"local1", "local2", "local3"};
final String[] remoteHosts = {"remote1", "remote2", "remote3"};
final String[] requestingHosts = {"local3", "local2", "local1", "other"};
final int NUM_THREE_LOCAL_SPLITS = 10;
final int NUM_TWO_LOCAL_SPLITS = 10;
final int NUM_ONE_LOCAL_SPLITS = 10;
final int NUM_LOCAL_SPLITS = 30;
final int NUM_REMOTE_SPLITS = 10;
final int NUM_SPLITS = 40;
String[] threeLocalHosts = localHosts;
String[] twoLocalHosts = {localHosts[0], localHosts[1], remoteHosts[0]};
String[] oneLocalHost = {localHosts[0], remoteHosts[0], remoteHosts[1]};
String[] noLocalHost = remoteHosts;
int splitCnt = 0;
Set<LocatableInputSplit> splits = new HashSet<>();
// add splits with three local hosts
for (int i = 0; i < NUM_THREE_LOCAL_SPLITS; i++) {
splits.add(new LocatableInputSplit(splitCnt++, threeLocalHosts));
}
// add splits with two local hosts
for (int i = 0; i < NUM_TWO_LOCAL_SPLITS; i++) {
splits.add(new LocatableInputSplit(splitCnt++, twoLocalHosts));
}
// add splits with two local hosts
for (int i = 0; i < NUM_ONE_LOCAL_SPLITS; i++) {
splits.add(new LocatableInputSplit(splitCnt++, oneLocalHost));
}
// add splits with two local hosts
for (int i = 0; i < NUM_REMOTE_SPLITS; i++) {
splits.add(new LocatableInputSplit(splitCnt++, noLocalHost));
}
// get all available splits
LocatableInputSplitAssigner ia = new LocatableInputSplitAssigner(splits);
LocatableInputSplit is = null;
for (int i = 0; i < NUM_SPLITS; i++) {
String host = requestingHosts[i % requestingHosts.length];
is = ia.getNextInputSplit(host, 0);
// check valid split
assertThat(is).isNotNull();
// check unassigned split
assertThat(splits.remove(is)).isTrue();
// check priority of split
if (host.equals(localHosts[0])) {
assertThat(is.getHostnames()).isEqualTo(oneLocalHost);
} else if (host.equals(localHosts[1])) {
assertThat(is.getHostnames()).isEqualTo(twoLocalHosts);
} else if (host.equals(localHosts[2])) {
assertThat(is.getHostnames()).isEqualTo(threeLocalHosts);
} else {
assertThat(is.getHostnames()).isEqualTo(noLocalHost);
}
}
// check we had all
assertThat(splits).isEmpty();
assertThat(ia.getNextInputSplit("anotherHost", 0)).isNull();
assertThat(ia.getNumberOfRemoteAssignments()).isEqualTo(NUM_REMOTE_SPLITS);
assertThat(ia.getNumberOfLocalAssignments()).isEqualTo(NUM_LOCAL_SPLITS);
}
@Test
void testSerialSplitAssignmentMixedLocalHost() {
final String[] hosts = {"host1", "host1", "host1", "host2", "host2", "host3"};
final int NUM_SPLITS = 10 * hosts.length;
// load some splits
Set<LocatableInputSplit> splits = new HashSet<>();
for (int i = 0; i < NUM_SPLITS; i++) {
splits.add(new LocatableInputSplit(i, hosts[i % hosts.length]));
}
// get all available splits
LocatableInputSplitAssigner ia = new LocatableInputSplitAssigner(splits);
InputSplit is = null;
int i = 0;
while ((is = ia.getNextInputSplit(hosts[i++ % hosts.length], 0)) != null) {
assertThat(splits.remove(is)).isTrue();
}
// check we had all
assertThat(splits).isEmpty();
assertThat(ia.getNextInputSplit("anotherHost", 0)).isNull();
assertThat(ia.getNumberOfRemoteAssignments()).isZero();
assertThat(ia.getNumberOfLocalAssignments()).isEqualTo(NUM_SPLITS);
}
@Test
void testConcurrentSplitAssignmentNullHost() throws InterruptedException {
final int NUM_THREADS = 10;
final int NUM_SPLITS = 500;
final int SUM_OF_IDS = (NUM_SPLITS - 1) * (NUM_SPLITS) / 2;
final String[][] hosts = new String[][] {new String[] {"localhost"}, new String[0], null};
// load some splits
Set<LocatableInputSplit> splits = new HashSet<>();
for (int i = 0; i < NUM_SPLITS; i++) {
splits.add(new LocatableInputSplit(i, hosts[i % 3]));
}
final LocatableInputSplitAssigner ia = new LocatableInputSplitAssigner(splits);
final AtomicInteger splitsRetrieved = new AtomicInteger(0);
final AtomicInteger sumOfIds = new AtomicInteger(0);
Runnable retriever =
() -> {
LocatableInputSplit split;
while ((split = ia.getNextInputSplit(null, 0)) != null) {
splitsRetrieved.incrementAndGet();
sumOfIds.addAndGet(split.getSplitNumber());
}
};
// create the threads
Thread[] threads = new Thread[NUM_THREADS];
for (int i = 0; i < NUM_THREADS; i++) {
threads[i] = new Thread(retriever);
threads[i].setDaemon(true);
}
// launch concurrently
for (int i = 0; i < NUM_THREADS; i++) {
threads[i].start();
}
// sync
for (int i = 0; i < NUM_THREADS; i++) {
threads[i].join(5000);
}
// verify
for (int i = 0; i < NUM_THREADS; i++) {
assertThat(threads[i].isAlive()).isFalse();
}
assertThat(splitsRetrieved).hasValue(NUM_SPLITS);
assertThat(sumOfIds).hasValue(SUM_OF_IDS);
// nothing left
assertThat(ia.getNextInputSplit("", 0)).isNull();
assertThat(ia.getNumberOfRemoteAssignments()).isEqualTo(NUM_SPLITS);
assertThat(ia.getNumberOfLocalAssignments()).isZero();
}
@Test
void testConcurrentSplitAssignmentForSingleHost() throws InterruptedException {
final int NUM_THREADS = 10;
final int NUM_SPLITS = 500;
final int SUM_OF_IDS = (NUM_SPLITS - 1) * (NUM_SPLITS) / 2;
// load some splits
Set<LocatableInputSplit> splits = new HashSet<>();
for (int i = 0; i < NUM_SPLITS; i++) {
splits.add(new LocatableInputSplit(i, "testhost"));
}
final LocatableInputSplitAssigner ia = new LocatableInputSplitAssigner(splits);
final AtomicInteger splitsRetrieved = new AtomicInteger(0);
final AtomicInteger sumOfIds = new AtomicInteger(0);
Runnable retriever =
() -> {
LocatableInputSplit split;
while ((split = ia.getNextInputSplit("testhost", 0)) != null) {
splitsRetrieved.incrementAndGet();
sumOfIds.addAndGet(split.getSplitNumber());
}
};
// create the threads
Thread[] threads = new Thread[NUM_THREADS];
for (int i = 0; i < NUM_THREADS; i++) {
threads[i] = new Thread(retriever);
threads[i].setDaemon(true);
}
// launch concurrently
for (int i = 0; i < NUM_THREADS; i++) {
threads[i].start();
}
// sync
for (int i = 0; i < NUM_THREADS; i++) {
threads[i].join(5000);
}
// verify
for (int i = 0; i < NUM_THREADS; i++) {
assertThat(threads[i].isAlive()).isFalse();
}
assertThat(splitsRetrieved).hasValue(NUM_SPLITS);
assertThat(sumOfIds).hasValue(SUM_OF_IDS);
// nothing left
assertThat(ia.getNextInputSplit("testhost", 0)).isNull();
assertThat(ia.getNumberOfRemoteAssignments()).isZero();
assertThat(ia.getNumberOfLocalAssignments()).isEqualTo(NUM_SPLITS);
}
@Test
void testConcurrentSplitAssignmentForMultipleHosts() throws InterruptedException {
final int NUM_THREADS = 10;
final int NUM_SPLITS = 500;
final int SUM_OF_IDS = (NUM_SPLITS - 1) * (NUM_SPLITS) / 2;
final String[] hosts = {"host1", "host1", "host1", "host2", "host2", "host3"};
// load some splits
Set<LocatableInputSplit> splits = new HashSet<>();
for (int i = 0; i < NUM_SPLITS; i++) {
splits.add(new LocatableInputSplit(i, hosts[i % hosts.length]));
}
final LocatableInputSplitAssigner ia = new LocatableInputSplitAssigner(splits);
final AtomicInteger splitsRetrieved = new AtomicInteger(0);
final AtomicInteger sumOfIds = new AtomicInteger(0);
Runnable retriever =
() -> {
final String threadHost = hosts[(int) (Math.random() * hosts.length)];
LocatableInputSplit split;
while ((split = ia.getNextInputSplit(threadHost, 0)) != null) {
splitsRetrieved.incrementAndGet();
sumOfIds.addAndGet(split.getSplitNumber());
}
};
// create the threads
Thread[] threads = new Thread[NUM_THREADS];
for (int i = 0; i < NUM_THREADS; i++) {
threads[i] = new Thread(retriever);
threads[i].setDaemon(true);
}
// launch concurrently
for (int i = 0; i < NUM_THREADS; i++) {
threads[i].start();
}
// sync
for (int i = 0; i < NUM_THREADS; i++) {
threads[i].join(5000);
}
// verify
for (int i = 0; i < NUM_THREADS; i++) {
assertThat(threads[i].isAlive()).isFalse();
}
assertThat(splitsRetrieved).hasValue(NUM_SPLITS);
assertThat(sumOfIds).hasValue(SUM_OF_IDS);
// nothing left
assertThat(ia.getNextInputSplit("testhost", 0)).isNull();
// at least one fraction of hosts needs be local, no matter how bad the thread races
assertThat(ia.getNumberOfLocalAssignments())
.isGreaterThanOrEqualTo(NUM_SPLITS / hosts.length);
}
@Test
void testAssignmentOfManySplitsRandomly() {
long seed = Calendar.getInstance().getTimeInMillis();
final int NUM_SPLITS = 65536;
final String[] splitHosts = new String[256];
final String[] requestingHosts = new String[256];
final Random rand = new Random(seed);
for (int i = 0; i < splitHosts.length; i++) {
splitHosts[i] = "localHost" + i;
}
for (int i = 0; i < requestingHosts.length; i++) {
if (i % 2 == 0) {
requestingHosts[i] = "localHost" + i;
} else {
requestingHosts[i] = "remoteHost" + i;
}
}
String[] stringArray = {};
Set<String> hosts = new HashSet<>();
Set<LocatableInputSplit> splits = new HashSet<>();
for (int i = 0; i < NUM_SPLITS; i++) {
while (hosts.size() < 3) {
hosts.add(splitHosts[rand.nextInt(splitHosts.length)]);
}
splits.add(new LocatableInputSplit(i, hosts.toArray(stringArray)));
hosts.clear();
}
final LocatableInputSplitAssigner ia = new LocatableInputSplitAssigner(splits);
for (int i = 0; i < NUM_SPLITS; i++) {
LocatableInputSplit split =
ia.getNextInputSplit(requestingHosts[rand.nextInt(requestingHosts.length)], 0);
assertThat(split).isNotNull();
assertThat(splits.remove(split)).isTrue();
}
assertThat(splits).isEmpty();
assertThat(ia.getNextInputSplit("testHost", 0)).isNull();
}
}
| LocatableSplitAssignerTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/constraint/package-info.java | {
"start": 905,
"end": 989
} | class ____ placement constraints.
*/
package org.apache.hadoop.yarn.util.constraint; | for |
java | junit-team__junit5 | junit-platform-commons/src/main/java/org/junit/platform/commons/util/ReflectionUtils.java | {
"start": 62818,
"end": 72181
} | interface ____ methods that have not
// been overridden.
List<Method> visibleDefaultMethods = Arrays.stream(clazz.getMethods())
.filter(Method::isDefault)
.collect(toCollection(ArrayList::new));
if (visibleDefaultMethods.isEmpty()) {
return visibleDefaultMethods;
}
return Arrays.stream(clazz.getInterfaces())
.map(ReflectionUtils::getMethods)
.flatMap(List::stream)
.filter(visibleDefaultMethods::contains)
.collect(toCollection(ArrayList::new));
// @formatter:on
}
private static List<Field> toSortedMutableList(Field[] fields) {
return toSortedMutableList(fields, ReflectionUtils::defaultFieldSorter);
}
private static List<Method> toSortedMutableList(Method[] methods) {
return toSortedMutableList(methods, ReflectionUtils::defaultMethodSorter);
}
private static List<Class<?>> toSortedMutableList(Class<?>[] classes) {
return toSortedMutableList(classes, ReflectionUtils::defaultClassSorter);
}
private static <T> List<T> toSortedMutableList(T[] items, Comparator<? super T> comparator) {
List<T> result = new ArrayList<>(items.length);
Collections.addAll(result, items);
result.sort(comparator);
return result;
}
/**
* Field comparator inspired by JUnit 4's {@code org.junit.internal.MethodSorter}
* implementation.
*/
private static int defaultFieldSorter(Field field1, Field field2) {
return Integer.compare(field1.getName().hashCode(), field2.getName().hashCode());
}
/**
* Method comparator based upon JUnit 4's {@code org.junit.internal.MethodSorter}
* implementation.
*/
private static int defaultMethodSorter(Method method1, Method method2) {
String name1 = method1.getName();
String name2 = method2.getName();
int comparison = Integer.compare(name1.hashCode(), name2.hashCode());
if (comparison == 0) {
comparison = name1.compareTo(name2);
if (comparison == 0) {
comparison = method1.toString().compareTo(method2.toString());
}
}
return comparison;
}
/**
* Class comparator to achieve deterministic but nonobvious order.
*/
private static int defaultClassSorter(Class<?> class1, Class<?> class2) {
String name1 = class1.getName();
String name2 = class2.getName();
int comparison = Integer.compare(name1.hashCode(), name2.hashCode());
if (comparison == 0) {
comparison = name1.compareTo(name2);
}
return comparison;
}
private static List<Method> getInterfaceMethods(Class<?> clazz, HierarchyTraversalMode traversalMode) {
List<Method> allInterfaceMethods = new ArrayList<>();
for (Class<?> ifc : clazz.getInterfaces()) {
// @formatter:off
Method[] localInterfaceMethods = getMethods(ifc).stream()
.filter(m -> !isAbstract(m))
.toArray(Method[]::new);
Method[] superinterfaceMethods = getInterfaceMethods(ifc, traversalMode).stream()
.filter(method -> isNotOverriddenByLocalMethods(method, localInterfaceMethods))
.toArray(Method[]::new);
// @formatter:on
if (traversalMode == TOP_DOWN) {
Collections.addAll(allInterfaceMethods, superinterfaceMethods);
}
Collections.addAll(allInterfaceMethods, localInterfaceMethods);
if (traversalMode == BOTTOM_UP) {
Collections.addAll(allInterfaceMethods, superinterfaceMethods);
}
}
return allInterfaceMethods;
}
private static List<Field> getInterfaceFields(Class<?> clazz, HierarchyTraversalMode traversalMode) {
List<Field> allInterfaceFields = new ArrayList<>();
for (Class<?> ifc : clazz.getInterfaces()) {
Field[] localInterfaceFields = ifc.getFields();
Arrays.sort(localInterfaceFields, ReflectionUtils::defaultFieldSorter);
List<Field> superinterfaceFields = getInterfaceFields(ifc, traversalMode);
if (traversalMode == TOP_DOWN) {
allInterfaceFields.addAll(superinterfaceFields);
}
Collections.addAll(allInterfaceFields, localInterfaceFields);
if (traversalMode == BOTTOM_UP) {
allInterfaceFields.addAll(superinterfaceFields);
}
}
return allInterfaceFields;
}
private static List<Field> getSuperclassFields(Class<?> clazz, HierarchyTraversalMode traversalMode) {
Class<?> superclass = clazz.getSuperclass();
if (!isSearchable(superclass)) {
return Collections.emptyList();
}
return findAllFieldsInHierarchy(superclass, traversalMode);
}
private static List<Method> getSuperclassMethods(Class<?> clazz, HierarchyTraversalMode traversalMode) {
Class<?> superclass = clazz.getSuperclass();
if (!isSearchable(superclass)) {
return Collections.emptyList();
}
return findAllMethodsInHierarchy(superclass, traversalMode);
}
private static boolean isNotOverriddenByLocalMethods(Method method, Method[] localMethods) {
for (Method local : localMethods) {
if (isMethodOverriddenBy(method, local)) {
return false;
}
}
return true;
}
private static boolean isMethodOverriddenBy(Method upper, Method lower) {
// A static method cannot override anything.
if (Modifier.isStatic(lower.getModifiers())) {
return false;
}
// Cannot override a private, static, or final method.
int modifiers = upper.getModifiers();
if (Modifier.isPrivate(modifiers) || Modifier.isStatic(modifiers) || Modifier.isFinal(modifiers)) {
return false;
}
// Cannot override a package-private method in another package.
if (isPackagePrivate(upper) && !isDeclaredInSamePackage(upper, lower)) {
return false;
}
return hasCompatibleSignature(upper, lower.getName(), lower.getParameterTypes());
}
/**
* @since 5.14.1
*/
@API(status = INTERNAL, since = "5.14.1")
public static boolean isPackagePrivate(Member member) {
int modifiers = member.getModifiers();
return !(Modifier.isPublic(modifiers) || Modifier.isProtected(modifiers) || Modifier.isPrivate(modifiers));
}
private static boolean isDeclaredInSamePackage(Method m1, Method m2) {
return isDeclaredInSamePackage(m1.getDeclaringClass(), m2.getDeclaringClass());
}
/**
* @since 5.14.1
*/
@API(status = INTERNAL, since = "5.14.1")
public static boolean isDeclaredInSamePackage(Class<?> c1, Class<?> c2) {
return c1.getPackageName().equals(c2.getPackageName());
}
/**
* Determine if the supplied candidate method (typically a method higher in
* the type hierarchy) has a signature that is compatible with a method that
* has the supplied name and parameter types, taking method sub-signatures
* and generics into account.
*/
private static boolean hasCompatibleSignature(Method candidate, String methodName, Class<?>[] parameterTypes) {
if (!methodName.equals(candidate.getName())) {
return false;
}
if (parameterTypes.length != candidate.getParameterCount()) {
return false;
}
Class<?>[] candidateParameterTypes = candidate.getParameterTypes();
// trivial case: parameter types exactly match
if (Arrays.equals(parameterTypes, candidateParameterTypes)) {
return true;
}
// param count is equal, but types do not match exactly: check for method sub-signatures
// https://docs.oracle.com/javase/specs/jls/se8/html/jls-8.html#jls-8.4.2
for (int i = 0; i < parameterTypes.length; i++) {
Class<?> lowerType = parameterTypes[i];
Class<?> upperType = candidateParameterTypes[i];
if (!upperType.isAssignableFrom(lowerType)) {
return false;
}
}
// lower is sub-signature of upper: check for generics in upper method
return isGeneric(candidate);
}
static boolean isGeneric(Method method) {
return isGeneric(method.getGenericReturnType())
|| Arrays.stream(method.getGenericParameterTypes()).anyMatch(ReflectionUtils::isGeneric);
}
private static boolean isGeneric(Type type) {
return type instanceof TypeVariable || type instanceof GenericArrayType;
}
/**
* @since 1.11
*/
@API(status = INTERNAL, since = "1.11")
@SuppressWarnings("deprecation") // "AccessibleObject.isAccessible()" is deprecated in Java 9
public static <T extends Executable> T makeAccessible(T executable) {
if ((!isPublic(executable) || !isPublic(executable.getDeclaringClass())) && !executable.isAccessible()) {
executable.setAccessible(true);
}
return executable;
}
/**
* @since 1.12
*/
@API(status = INTERNAL, since = "1.12")
@SuppressWarnings("deprecation") // "AccessibleObject.isAccessible()" is deprecated in Java 9
public static Field makeAccessible(Field field) {
if ((!isPublic(field) || !isPublic(field.getDeclaringClass()) || isFinal(field)) && !field.isAccessible()) {
field.setAccessible(true);
}
return field;
}
/**
* Return all classes and interfaces that can be used as assignment types
* for instances of the specified {@link Class}, including itself.
*
* @param clazz the {@code Class} to look up
* @see Class#isAssignableFrom
*/
public static Set<Class<?>> getAllAssignmentCompatibleClasses(Class<?> clazz) {
Preconditions.notNull(clazz, "Class must not be null");
Set<Class<?>> result = new LinkedHashSet<>();
getAllAssignmentCompatibleClasses(clazz, result);
return result;
}
private static void getAllAssignmentCompatibleClasses(Class<?> clazz, Set<Class<?>> result) {
for (Class<?> current = clazz; current != null; current = current.getSuperclass()) {
result.add(current);
for (Class<?> interfaceClass : current.getInterfaces()) {
if (!result.contains(interfaceClass)) {
getAllAssignmentCompatibleClasses(interfaceClass, result);
}
}
}
}
/**
* Determine if the supplied | default |
java | apache__maven | api/maven-api-core/src/main/java/org/apache/maven/api/services/xml/XmlFactory.java | {
"start": 1197,
"end": 1330
} | interface ____ read/write objects to/from XML.
*
* @param <T> the object type to read/write
* @since 4.0.0
*/
@Experimental
public | to |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/ShortParam.java | {
"start": 887,
"end": 1916
} | class ____ extends Param<Short, ShortParam.Domain> {
ShortParam(final Domain domain, final Short value,
final Short min, final Short max) {
super(domain, value);
checkRange(min, max);
}
private void checkRange(final Short min, final Short max) {
if (value == null) {
return;
}
if (min != null && value < min) {
throw new IllegalArgumentException("Invalid parameter range: " + getName()
+ " = " + domain.toString(value) + " < " + domain.toString(min));
}
if (max != null && value > max) {
throw new IllegalArgumentException("Invalid parameter range: " + getName()
+ " = " + domain.toString(value) + " > " + domain.toString(max));
}
}
@Override
public String toString() {
return getName() + "=" + domain.toString(getValue());
}
/** @return the parameter value as a string */
@Override
public final String getValueString() {
return domain.toString(getValue());
}
/** The domain of the parameter. */
static final | ShortParam |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/observable/ObservableConcatMapScheduler.java | {
"start": 1312,
"end": 2490
} | class ____<T, U> extends AbstractObservableWithUpstream<T, U> {
final Function<? super T, ? extends ObservableSource<? extends U>> mapper;
final int bufferSize;
final ErrorMode delayErrors;
final Scheduler scheduler;
public ObservableConcatMapScheduler(ObservableSource<T> source, Function<? super T, ? extends ObservableSource<? extends U>> mapper,
int bufferSize, ErrorMode delayErrors, Scheduler scheduler) {
super(source);
this.mapper = mapper;
this.delayErrors = delayErrors;
this.bufferSize = Math.max(8, bufferSize);
this.scheduler = scheduler;
}
@Override
public void subscribeActual(Observer<? super U> observer) {
if (delayErrors == ErrorMode.IMMEDIATE) {
SerializedObserver<U> serial = new SerializedObserver<>(observer);
source.subscribe(new ConcatMapObserver<>(serial, mapper, bufferSize, scheduler.createWorker()));
} else {
source.subscribe(new ConcatMapDelayErrorObserver<>(observer, mapper, bufferSize, delayErrors == ErrorMode.END, scheduler.createWorker()));
}
}
static final | ObservableConcatMapScheduler |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configuration/HttpSecurityConfigurationTests.java | {
"start": 25614,
"end": 25964
} | class ____ {
@Bean
InMemoryUserDetailsManager userDetailsService() {
// @formatter:off
UserDetails user = User.withDefaultPasswordEncoder()
.username("user")
.password("password")
.roles("USER")
.build();
// @formatter:on
return new InMemoryUserDetailsManager(user);
}
}
@Configuration
static | UserDetailsConfig |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/Hibernate.java | {
"start": 12202,
"end": 18346
} | class ____ the proxied entity is assignable
* to the given class. This operation will initialize a proxy by side effect.
*
* @param proxy an entity instance or proxy
* @return {@code true} if the entity is an instance of the given class
*
* @since 6.2
*/
public static boolean isInstance(Object proxy, Class<?> entityClass) {
return entityClass.isInstance( proxy )
|| entityClass.isAssignableFrom( getClass( proxy ) );
}
/**
* Determines if the given attribute of the given entity instance is initialized.
* This operation returns {@code true} if the field or property references an
* unfetched collection or proxy.
*
* @param entity The entity instance or proxy
* @param attribute A persistent attribute of the entity
* @return true if the named property of the object is not listed as uninitialized;
* false otherwise
*/
public static <E> boolean isPropertyInitialized(E entity, Attribute<? super E, ?> attribute) {
return isPropertyInitialized( entity, attribute.getName() );
}
/**
* Determines if the field or property with the given name of the given entity
* instance is initialized. If the named property does not exist or is not
* persistent, this method always returns {@code true}. This operation returns
* {@code true} if the field or property references an unfetched collection or
* proxy.
*
* @param proxy The entity instance or proxy
* @param attributeName the name of a persistent attribute of the object
* @return true if the named property of the object is not listed as uninitialized;
* false otherwise
*
* @see jakarta.persistence.PersistenceUtil#isLoaded(Object, String)
*/
public static boolean isPropertyInitialized(Object proxy, String attributeName) {
final Object entity;
final var lazyInitializer = extractLazyInitializer( proxy );
if ( lazyInitializer != null ) {
if ( lazyInitializer.isUninitialized() ) {
return false;
}
else {
entity = lazyInitializer.getImplementation();
}
}
else {
entity = proxy;
}
final boolean attributeUnloaded =
isPersistentAttributeInterceptable( entity )
&& getAttributeInterceptor( entity )
instanceof BytecodeLazyAttributeInterceptor lazyAttributeInterceptor
&& !lazyAttributeInterceptor.isAttributeLoaded( attributeName );
return !attributeUnloaded;
}
/**
* Initializes the given attribute of the given entity instance. This operation
* does not fetch a collection or proxy referenced by the field or property.
*
* @param entity The entity instance or proxy
* @param attribute A persistent attribute of the entity
*/
public static <E> void initializeProperty(E entity, Attribute<? super E, ?> attribute) {
initializeProperty( entity, attribute.getName() );
}
/**
* Initializes the field or property with the given name of the given entity
* instance. This operation does not fetch a collection or proxy referenced
* by the field or property.
*
* @param proxy The entity instance or proxy
* @param attributeName the name of a persistent attribute of the object
*
* @see jakarta.persistence.PersistenceUnitUtil#load(Object, String)
*/
public static void initializeProperty(Object proxy, String attributeName) {
final var lazyInitializer = extractLazyInitializer( proxy );
final Object entity = lazyInitializer != null ? lazyInitializer.getImplementation() : proxy;
if ( isPersistentAttributeInterceptable( entity ) ) {
getAttributeInterceptor( entity ).readObject( entity, attributeName, null );
}
}
/**
* If the given object is not a proxy, return it. But, if it is a proxy, ensure
* that the proxy is initialized, and return a direct reference to its proxied
* entity object.
*
* @param proxy an object which might be a proxy for an entity
* @return a reference that is never proxied
*
* @throws LazyInitializationException if this operation is called on an
* uninitialized proxy that is not associated with an open session.
*/
public static Object unproxy(Object proxy) {
final var lazyInitializer = extractLazyInitializer( proxy );
return lazyInitializer != null ? lazyInitializer.getImplementation() : proxy;
}
/**
* If the given object is not a proxy, cast it to the given type, and return it.
* But, if it is a proxy, ensure that the proxy is initialized, and return a
* direct reference to its proxied entity object, after casting to the given type.
*
* @param proxy an object which might be a proxy for an entity
* @param entityClass an entity type to cast to
* @return a reference that is never proxied
*
* @throws LazyInitializationException if this operation is called on an
* uninitialized proxy that is not associated with an open session.
*/
public static <T> T unproxy(T proxy, Class<T> entityClass) {
return entityClass.cast( unproxy( proxy ) );
}
/**
* Obtain a detached, uninitialized reference (a proxy) for a persistent entity with
* the given identifier.
* <p>
* The returned proxy is not associated with any session, and cannot be initialized
* by calling {@link #initialize(Object)}. It can be used to represent a reference to
* the entity when working with a detached object graph.
*
* @param sessionFactory the session factory with which the entity is associated
* @param entityClass the entity class
* @param id the id of the persistent entity instance
*
* @return a detached uninitialized proxy
*
* @since 6.0
*/
@SuppressWarnings("unchecked")
public static <E> E createDetachedProxy(SessionFactory sessionFactory, Class<E> entityClass, Object id) {
final var persister =
sessionFactory.unwrap( SessionFactoryImplementor.class )
.getMappingMetamodel()
.findEntityDescriptor( entityClass );
if ( persister == null ) {
throw new UnknownEntityTypeException( entityClass );
}
return (E) persister.createProxy( id, null );
}
/**
* Operations for obtaining references to persistent collections of a certain type.
*
* @param <C> the type of collection, for example, {@code List<User>}
*
* @since 6.0
*/
public static final | of |
java | spring-projects__spring-boot | module/spring-boot-elasticsearch/src/main/java/org/springframework/boot/elasticsearch/docker/compose/ElasticsearchDockerComposeConnectionDetailsFactory.java | {
"start": 1482,
"end": 2113
} | class ____
extends DockerComposeConnectionDetailsFactory<ElasticsearchConnectionDetails> {
private static final int ELASTICSEARCH_PORT = 9200;
protected ElasticsearchDockerComposeConnectionDetailsFactory() {
super("elasticsearch");
}
@Override
protected ElasticsearchConnectionDetails getDockerComposeConnectionDetails(DockerComposeConnectionSource source) {
return new ElasticsearchDockerComposeConnectionDetails(source.getRunningService());
}
/**
* {@link ElasticsearchConnectionDetails} backed by an {@code elasticsearch}
* {@link RunningService}.
*/
static | ElasticsearchDockerComposeConnectionDetailsFactory |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/binder/AttributeBinder.java | {
"start": 456,
"end": 808
} | interface ____ directly with model objects
* like {@link PersistentClass} and {@link Property} to implement the
* semantics of some {@linkplain org.hibernate.annotations.AttributeBinderType
* custom mapping annotation}.
*
* @see org.hibernate.annotations.AttributeBinderType
* @see TypeBinder
*
* @author Gavin King
*/
@Incubating
public | interacts |
java | apache__camel | components/camel-micrometer/src/test/java/org/apache/camel/component/micrometer/routepolicy/MicrometerRoutePolicyMulticastSubRouteTest.java | {
"start": 6324,
"end": 7368
} | class ____ exception
break;
default: {
fail("Unexpected meter " + meterName);
break;
}
}
});
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
onException(IllegalStateException.class)
.handled(true);
from("direct:foo").routeId("foo").to("mock:foo");
from("direct:bar").routeId("bar").multicast().to("mock:bar1", "mock:bar2");
from("direct:multicast").routeId("multicast").multicast().to("direct:foo", "direct:bar",
"direct:failureHandled");
from("direct:failure").routeId("failure").throwException(new Exception("forced"));
from("direct:failureHandled").routeId("failureHandled").throwException(new IllegalStateException("forced"));
}
};
}
}
| cast |
java | micronaut-projects__micronaut-core | inject-groovy/src/main/groovy/io/micronaut/ast/groovy/scan/Attribute.java | {
"start": 853,
"end": 1599
} | class ____ {
/**
* The type of this attribute.
*/
final String type;
/**
* The raw value of this attribute, used only for unknown attributes.
*/
byte[] value;
/**
* The next attribute in this attribute list. May be {@code null}.
*/
Attribute next;
/**
* Constructs a new empty attribute.
*
* @param type the type of the attribute.
*/
protected Attribute(final String type) {
this.type = type;
}
/**
* Reads a {@link #type type} attribute. This method must return a
* <i>new</i> {@link Attribute} object, of type {@link #type type},
* corresponding to the {@code len} bytes starting at the given offset, in
* the given | Attribute |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.