language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/hql/FunnyNamesTests.java | {
"start": 502,
"end": 733
} | class ____ {
@Test
public void basicTest(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
session.createQuery( "from Any" ).list();
session.createQuery( "from in.from.Any" ).list();
} );
}
}
| FunnyNamesTests |
java | google__dagger | javatests/dagger/internal/codegen/RepeatedModuleValidationTest.java | {
"start": 4097,
"end": 4340
} | interface ____ {",
" TestSubcomponent newTestSubcomponent();",
"}");
CompilerTests.daggerCompiler(MODULE_FILE, subcomponentFile, componentFile)
.compile(subject -> subject.hasErrorCount(0));
}
}
| TestComponent |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/batch/BatchRowCountWarningTest.java | {
"start": 6075,
"end": 6466
} | class ____ extends SubEntity {
private String nickname;
public MyEntity() {
}
public MyEntity(Long id, String name, String nickname) {
super( id, name );
this.nickname = nickname;
}
public String getNickname() {
return nickname;
}
public void setNickname(String nickname) {
this.nickname = nickname;
}
}
@Entity( name = "SpamEntity" )
public static | MyEntity |
java | quarkusio__quarkus | extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/CustomQueryMethodsAdder.java | {
"start": 1688,
"end": 32057
} | class ____ extends AbstractMethodsAdder {
private static final String QUERY_VALUE_FIELD = "value";
private static final String QUERY_COUNT_FIELD = "countQuery";
private static final String NAMED_QUERY_FIELD = "query";
private static final Pattern SELECT_CLAUSE = Pattern.compile("select\\s+(.+)\\s+from", Pattern.CASE_INSENSITIVE);
private static final Pattern FIELD_ALIAS = Pattern.compile(".*\\s+[as|AS]+\\s+([\\w\\.]+)");
private static final Pattern FIELD_NAME = Pattern.compile("(\\w+).*");
private static final Pattern NAMED_PARAMETER = Pattern.compile("\\:(\\w+)\\b");
private final IndexView index;
private final ClassOutput nonBeansClassOutput;
private final Consumer<String> customClassCreatedCallback;
private final FieldDescriptor operationsField;
public CustomQueryMethodsAdder(IndexView index, ClassOutput classOutput, Consumer<String> customClassCreatedCallback,
TypeBundle typeBundle) {
this.index = index;
this.nonBeansClassOutput = classOutput;
this.customClassCreatedCallback = customClassCreatedCallback;
String operationsName = typeBundle.operations().dotName().toString();
operationsField = of(operationsName, "INSTANCE", operationsName);
}
public void add(ClassCreator classCreator, FieldDescriptor entityClassFieldDescriptor, ClassInfo repositoryClassInfo,
ClassInfo entityClassInfo, String idTypeStr) {
// Remember custom return types: {resultType:{methodName:[fieldNames]}}
Map<DotName, Map<String, List<String>>> customResultTypes = new HashMap<>(3);
Map<DotName, DotName> customResultTypeNames = new HashMap<>(3);
Set<DotName> entityFieldTypeNames = new HashSet<>();
for (MethodInfo method : repositoryClassInfo.methods()) {
AnnotationInstance queryInstance = method.annotation(DotNames.SPRING_DATA_QUERY);
AnnotationInstance namedQueryInstance = getNamedQueryForMethod(method, entityClassInfo);
String methodName = method.name();
String repositoryName = repositoryClassInfo.name().toString();
String queryString;
if (queryInstance != null) {
verifyQueryAnnotation(queryInstance, methodName, repositoryName);
queryString = queryInstance.value(QUERY_VALUE_FIELD).asString().trim();
} else if (namedQueryInstance != null) {
queryString = namedQueryInstance.value(NAMED_QUERY_FIELD).asString().trim();
} else {
// handled by DerivedMethodsAdder
continue;
}
if (queryString.contains("#{")) {
throw new IllegalArgumentException("spEL expressions are not currently supported. " +
"Offending method is " + methodName + " of Repository " + repositoryName);
}
if (!(queryString.startsWith("select") || queryString.startsWith("SELECT")
|| queryString.startsWith("from") || queryString.startsWith("FROM")
|| queryString.startsWith("delete") || queryString.startsWith("DELETE")
|| queryString.startsWith("update") || queryString.startsWith("UPDATE"))) {
throw new IllegalArgumentException("Unsupported query type in @Query. " +
"Offending method is " + methodName + " of Repository " + repositoryName);
}
List<Type> methodParameterTypes = method.parameterTypes();
String[] methodParameterTypesStr = new String[methodParameterTypes.size()];
List<Integer> queryParameterIndexes = new ArrayList<>(methodParameterTypes.size());
Integer pageableParameterIndex = null;
Integer sortParameterIndex = null;
for (int i = 0; i < methodParameterTypes.size(); i++) {
DotName parameterType = methodParameterTypes.get(i).name();
methodParameterTypesStr[i] = parameterType.toString();
if (DotNames.SPRING_DATA_PAGEABLE.equals(parameterType)
|| DotNames.SPRING_DATA_PAGE_REQUEST.equals(parameterType)) {
if (pageableParameterIndex != null) {
throw new IllegalArgumentException("Method " + method.name() + " of Repository " + repositoryClassInfo
+ "has invalid parameters - only a single parameter of type" + DotNames.SPRING_DATA_PAGEABLE
+ " can be specified");
}
pageableParameterIndex = i;
} else if (DotNames.SPRING_DATA_SORT.equals(parameterType)) {
if (sortParameterIndex != null) {
throw new IllegalArgumentException("Method " + method.name() + " of Repository " + repositoryClassInfo
+ "has invalid parameters - only a single parameter of type" + DotNames.SPRING_DATA_SORT
+ " can be specified");
}
sortParameterIndex = i;
} else {
queryParameterIndexes.add(i);
}
}
// go through the method annotations, find the @Param annotation on parameters
// and map the name to the method param index
Map<String, Integer> namedParameterToIndex = new HashMap<>();
for (AnnotationInstance annotation : method.annotations(DotNames.SPRING_DATA_PARAM)) {
var index = (int) annotation.target().asMethodParameter().position();
namedParameterToIndex.put(annotation.value().asString(), index);
}
// if no or only some parameters are annotated with @Param, add the compiled names (if present)
if (namedParameterToIndex.size() < methodParameterTypes.size()) {
for (int index = 0; index < methodParameterTypes.size(); index++) {
if (namedParameterToIndex.values().contains(index)) {
continue;
}
String parameterName = method.parameterName(index);
if (parameterName != null) {
namedParameterToIndex.put(parameterName, index);
}
}
}
boolean isModifying = (method.annotation(DotNames.SPRING_DATA_MODIFYING) != null);
if (isModifying && (sortParameterIndex != null || pageableParameterIndex != null)) {
throw new IllegalArgumentException(
method.name() + " of Repository " + repositoryClassInfo
+ " is meant to be a insert/update/delete query and therefore doesn't " +
"support Pageable and Sort method parameters");
}
Set<String> usedNamedParameters = extractNamedParameters(queryString);
if (!usedNamedParameters.isEmpty()) {
Set<String> missingParameters = new LinkedHashSet<>(usedNamedParameters);
missingParameters.removeAll(namedParameterToIndex.keySet());
if (!missingParameters.isEmpty()) {
throw new IllegalArgumentException(
method.name() + " of Repository " + repositoryClassInfo
+ " is missing the named parameters " + missingParameters
+ ", provided are " + namedParameterToIndex.keySet()
+ ". Ensure that the parameters are correctly annotated with @Param.");
}
namedParameterToIndex.keySet().retainAll(usedNamedParameters);
} else {
namedParameterToIndex.clear();
}
DotName methodReturnTypeDotName = method.returnType().name();
try (MethodCreator methodCreator = classCreator.getMethodCreator(method.name(), methodReturnTypeDotName.toString(),
methodParameterTypesStr)) {
if (isModifying) {
methodCreator.addAnnotation(Transactional.class);
AnnotationInstance modifyingAnnotation = method.annotation(DotNames.SPRING_DATA_MODIFYING);
handleFlushAutomatically(modifyingAnnotation, methodCreator, entityClassFieldDescriptor);
if (queryString.toLowerCase().startsWith("delete")) {
if (!DotNames.PRIMITIVE_LONG.equals(methodReturnTypeDotName)
&& !DotNames.LONG.equals(methodReturnTypeDotName)
&& !DotNames.VOID.equals(methodReturnTypeDotName)) {
throw new IllegalArgumentException(
method.name() + " of Repository " + repositoryClassInfo
+ " is meant to be a delete query and can therefore only have a void or long return type");
}
// we need to strip 'delete' or else JpaOperations.delete will generate the wrong query
String deleteQueryString = queryString.substring("delete".length());
ResultHandle deleteCount;
if (!namedParameterToIndex.isEmpty()) {
ResultHandle parameters = generateParametersObject(namedParameterToIndex, methodCreator);
// call JpaOperations.delete
deleteCount = methodCreator.invokeVirtualMethod(
MethodDescriptor.ofMethod(AbstractJpaOperations.class, "delete", long.class,
Class.class, String.class, Parameters.class),
methodCreator.readStaticField(operationsField),
methodCreator.readInstanceField(entityClassFieldDescriptor, methodCreator.getThis()),
methodCreator.load(deleteQueryString), parameters);
} else {
ResultHandle paramsArray = generateParamsArray(queryParameterIndexes, methodCreator);
// call JpaOperations.delete
deleteCount = methodCreator.invokeVirtualMethod(
MethodDescriptor.ofMethod(AbstractJpaOperations.class, "delete", long.class,
Class.class, String.class, Object[].class),
methodCreator.readStaticField(operationsField),
methodCreator.readInstanceField(entityClassFieldDescriptor, methodCreator.getThis()),
methodCreator.load(deleteQueryString), paramsArray);
}
handleClearAutomatically(modifyingAnnotation, methodCreator, entityClassFieldDescriptor);
if (DotNames.VOID.equals(methodReturnTypeDotName)) {
methodCreator.returnValue(null);
}
handleLongReturnValue(methodCreator, deleteCount, methodReturnTypeDotName);
} else if (queryString.toLowerCase().startsWith("update")) {
if (!DotNames.PRIMITIVE_INTEGER.equals(methodReturnTypeDotName)
&& !DotNames.INTEGER.equals(methodReturnTypeDotName)
&& !DotNames.VOID.equals(methodReturnTypeDotName)) {
throw new IllegalArgumentException(
method.name() + " of Repository " + repositoryClassInfo
+ " is meant to be an update query and can therefore only have a void or integer return type");
}
ResultHandle updateCount;
if (!namedParameterToIndex.isEmpty()) {
ResultHandle parameters = generateParametersObject(namedParameterToIndex, methodCreator);
ResultHandle parametersMap = methodCreator.invokeVirtualMethod(
MethodDescriptor.ofMethod(Parameters.class, "map", Map.class),
parameters);
// call JpaOperations.executeUpdate
updateCount = methodCreator.invokeVirtualMethod(
MethodDescriptor.ofMethod(AbstractJpaOperations.class, "executeUpdate", int.class,
String.class, Map.class),
methodCreator.readStaticField(operationsField),
methodCreator.load(queryString),
parametersMap);
} else {
ResultHandle paramsArray = generateParamsArray(queryParameterIndexes, methodCreator);
// call JpaOperations.executeUpdate
updateCount = methodCreator.invokeVirtualMethod(
MethodDescriptor.ofMethod(AbstractJpaOperations.class, "executeUpdate",
int.class, String.class, Object[].class),
methodCreator.readStaticField(operationsField),
methodCreator.load(queryString),
paramsArray);
}
handleClearAutomatically(modifyingAnnotation, methodCreator, entityClassFieldDescriptor);
if (DotNames.VOID.equals(methodReturnTypeDotName)) {
methodCreator.returnValue(null);
}
handleIntegerReturnValue(methodCreator, updateCount, methodReturnTypeDotName);
} else {
throw new IllegalArgumentException(
method.name() + " of Repository " + repositoryClassInfo
+ " has been annotated with @Modifying but the @Query does not appear to be " +
"a delete or update query");
}
} else {
// by default just hope that adding select count(*) will do
String countQueryString = "SELECT COUNT(*) " + queryString;
if (queryInstance != null && queryInstance.value(QUERY_COUNT_FIELD) != null) { // if a countQuery is specified, use it
countQueryString = queryInstance.value(QUERY_COUNT_FIELD).asString().trim();
} else {
// otherwise try and derive the select query from the method name and use that to construct the count query
MethodNameParser methodNameParser = new MethodNameParser(repositoryClassInfo, index);
try {
MethodNameParser.Result parseResult = methodNameParser.parse(method);
if (MethodNameParser.QueryType.SELECT == parseResult.getQueryType()) {
countQueryString = "SELECT COUNT (*) " + parseResult.getQuery();
}
} catch (Exception ignored) {
// we just ignore the exception if the method does not match one of the supported styles
}
}
// Find the type of data used in the result
// e.g. method.returnType() is a List that may contain non-entity elements
Type resultType = verifyQueryResultType(method.returnType(), index);
DotName customResultTypeName = resultType.name();
if (customResultTypeName.equals(entityClassInfo.name())
|| customResultTypeName.toString().equals(idTypeStr)
|| isHibernateSupportedReturnType(customResultTypeName)
|| getFieldTypeNames(entityClassInfo, entityFieldTypeNames).contains(customResultTypeName)) {
// no special handling needed
customResultTypeName = null;
} else {
// The result is using a custom type.
List<String> fieldNames = getFieldNames(queryString);
// If the custom type is an interface, we need to generate the implementation
ClassInfo resultClassInfo = index.getClassByName(customResultTypeName);
if (Modifier.isInterface(resultClassInfo.flags())) {
// Find the implementation name, and use that for subsequent query result generation
customResultTypeName = customResultTypeNames.computeIfAbsent(customResultTypeName,
(k) -> createSimpleInterfaceImpl(k, entityClassInfo.name()));
// Remember the parameters for this usage of the custom type, we'll deal with it later
customResultTypes.computeIfAbsent(customResultTypeName,
k -> new HashMap<>()).put(methodName, fieldNames);
} else {
throw new IllegalArgumentException(
"Query annotations may only use interfaces to map results to non-entity types. "
+ "Offending query string is \"" + queryString + "\" on method " + methodName
+ " of Repository " + repositoryName);
}
}
ResultHandle panacheQuery;
if (!namedParameterToIndex.isEmpty()) {
ResultHandle parameters = generateParametersObject(namedParameterToIndex, methodCreator);
// call JpaOperations.find()
panacheQuery = methodCreator.invokeStaticMethod(
MethodDescriptor.ofMethod(AdditionalJpaOperations.class, "find",
PanacheQuery.class, AbstractJpaOperations.class, Class.class, String.class,
String.class, io.quarkus.panache.common.Sort.class, Parameters.class),
methodCreator.readStaticField(operationsField),
methodCreator.readInstanceField(entityClassFieldDescriptor, methodCreator.getThis()),
methodCreator.load(queryString), methodCreator.load(countQueryString),
generateSort(sortParameterIndex, pageableParameterIndex, methodCreator), parameters);
} else {
ResultHandle paramsArray = generateParamsArray(queryParameterIndexes, methodCreator);
// call JpaOperations.find()
panacheQuery = methodCreator.invokeStaticMethod(
MethodDescriptor.ofMethod(AdditionalJpaOperations.class, "find",
PanacheQuery.class, AbstractJpaOperations.class, Class.class, String.class,
String.class, io.quarkus.panache.common.Sort.class, Object[].class),
methodCreator.readStaticField(operationsField),
methodCreator.readInstanceField(entityClassFieldDescriptor, methodCreator.getThis()),
methodCreator.load(queryString), methodCreator.load(countQueryString),
generateSort(sortParameterIndex, pageableParameterIndex, methodCreator), paramsArray);
}
generateFindQueryResultHandling(methodCreator, panacheQuery, pageableParameterIndex, repositoryClassInfo,
entityClassInfo, methodReturnTypeDotName, null, method.name(), customResultTypeName,
Object[].class.getName());
}
}
}
for (Map.Entry<DotName, DotName> mapping : customResultTypeNames.entrySet()) {
DotName interfaceName = mapping.getKey();
DotName implName = mapping.getValue();
generateCustomResultTypes(interfaceName, implName, customResultTypes.get(implName));
customClassCreatedCallback.accept(implName.toString());
}
}
private Set<String> extractNamedParameters(String queryString) {
Set<String> namedParameters = new LinkedHashSet<>();
final Matcher matcher = NAMED_PARAMETER.matcher(queryString);
while (matcher.find()) {
namedParameters.add(matcher.group(1));
}
return namedParameters;
}
// we currently only support the 'value' attribute of @Query
private void verifyQueryAnnotation(AnnotationInstance queryInstance, String methodName, String repositoryName) {
List<AnnotationValue> values = queryInstance.values();
for (AnnotationValue value : values) {
if (!QUERY_VALUE_FIELD.equals(value.name()) && !QUERY_COUNT_FIELD.equals(value.name())) {
throw new IllegalArgumentException("Attribute " + value.name() + " of @Query is currently not supported. " +
"Offending method is " + methodName + " of Repository " + repositoryName);
}
}
if (queryInstance.value(QUERY_VALUE_FIELD) == null) {
throw new IllegalArgumentException("'value' attribute must be specified on @Query annotation of method. " +
"Offending method is " + methodName + " of Repository " + repositoryName);
}
}
private ResultHandle generateParamsArray(List<Integer> queryParameterIndexes, MethodCreator methodCreator) {
ResultHandle paramsArray = methodCreator.newArray(Object.class, queryParameterIndexes.size());
for (int i = 0; i < queryParameterIndexes.size(); i++) {
methodCreator.writeArrayValue(paramsArray, methodCreator.load(i),
methodCreator.getMethodParam(queryParameterIndexes.get(i)));
}
return paramsArray;
}
private ResultHandle generateParametersObject(Map<String, Integer> namedParameterToIndex, MethodCreator methodCreator) {
ResultHandle parameters = methodCreator.newInstance(MethodDescriptor.ofConstructor(Parameters.class));
for (Map.Entry<String, Integer> entry : namedParameterToIndex.entrySet()) {
methodCreator.invokeVirtualMethod(
MethodDescriptor.ofMethod(Parameters.class, "and", Parameters.class,
String.class, Object.class),
parameters, methodCreator.load(entry.getKey()), methodCreator.getMethodParam(entry.getValue()));
}
return parameters;
}
// ensure that Sort is correctly handled whether it's specified from the method name or a method param
private ResultHandle generateSort(Integer sortParameterIndex, Integer pageableParameterIndex, MethodCreator methodCreator) {
ResultHandle sort = methodCreator.loadNull();
if (sortParameterIndex != null) {
sort = methodCreator.invokeStaticMethod(
MethodDescriptor.ofMethod(TypesConverter.class, "toPanacheSort",
io.quarkus.panache.common.Sort.class,
org.springframework.data.domain.Sort.class),
methodCreator.getMethodParam(sortParameterIndex));
} else if (pageableParameterIndex != null) {
sort = methodCreator.invokeStaticMethod(
MethodDescriptor.ofMethod(TypesConverter.class, "pageToPanacheSort",
io.quarkus.panache.common.Sort.class,
org.springframework.data.domain.Pageable.class),
methodCreator.getMethodParam(pageableParameterIndex));
}
return sort;
}
private List<String> getFieldNames(String queryString) {
Matcher matcher = SELECT_CLAUSE.matcher(queryString);
if (matcher.find()) {
String selectClause = matcher.group(1).trim();
String[] fields = selectClause.split("\\s*,\\s+");
List<String> fieldNames = new ArrayList<>(fields.length);
for (String name : fields) {
Matcher m = FIELD_ALIAS.matcher(name);
if (m.matches()) {
name = m.group(1);
} else {
Matcher n = FIELD_NAME.matcher(name); // (\\w+).*
if (n.matches()) {
name = n.group(1);
}
}
fieldNames.add(name.toLowerCase());
}
return fieldNames;
}
return Collections.emptyList();
}
private void generateCustomResultTypes(DotName interfaceName, DotName implName, Map<String, List<String>> queryMethods) {
ClassInfo interfaceInfo = index.getClassByName(interfaceName);
try (ClassCreator implClassCreator = ClassCreator.builder().classOutput(nonBeansClassOutput)
.interfaces(interfaceName.toString()).className(implName.toString())
.build()) {
Map<String, FieldDescriptor> fields = new HashMap<>(3);
for (MethodInfo method : interfaceInfo.methods()) {
String getterName = method.name();
String propertyName = JavaBeanUtil.getPropertyNameFromGetter(getterName);
Type returnType = method.returnType();
if (returnType.kind() == Type.Kind.VOID) {
throw new IllegalArgumentException("Method " + method.name() + " of interface " + interfaceName
+ " is not a getter method since it returns void");
}
DotName fieldTypeName = getPrimitiveTypeName(returnType.name());
FieldDescriptor field = implClassCreator.getFieldCreator(propertyName, fieldTypeName.toString())
.getFieldDescriptor();
// create getter (based on the interface)
try (MethodCreator getter = implClassCreator.getMethodCreator(getterName, returnType.name().toString())) {
getter.setModifiers(Modifier.PUBLIC);
getter.returnValue(getter.readInstanceField(field, getter.getThis()));
}
fields.put(propertyName.toLowerCase(), field);
}
// Add static methods to convert from Object[] to this type
for (Map.Entry<String, List<String>> queryMethod : queryMethods.entrySet()) {
try (MethodCreator convert = implClassCreator.getMethodCreator("convert_" + queryMethod.getKey(),
implName.toString(), Object[].class.getName())) {
convert.setModifiers(Modifier.STATIC | Modifier.PUBLIC);
ResultHandle newObject = convert.newInstance(MethodDescriptor.ofConstructor(implName.toString()));
// Use field names in the query-declared order
List<String> queryNames = queryMethod.getValue();
// Object[] is the only parameter: values are in column/declared order
ResultHandle array = convert.getMethodParam(0);
for (int i = 0; i < queryNames.size(); i++) {
FieldDescriptor f = fields.get(queryNames.get(i));
if (f == null) {
throw new IllegalArgumentException("@Query annotation for " + queryMethod.getKey()
+ " does not use fields from " + interfaceName);
} else {
convert.writeInstanceField(f, newObject,
castReturnValue(convert, convert.readArrayValue(array, i), f.getType()));
}
}
convert.returnValue(newObject);
}
}
}
}
private ResultHandle castReturnValue(MethodCreator methodCreator, ResultHandle resultHandle, String type) {
switch (type) {
case "I":
resultHandle = methodCreator.invokeStaticMethod(
MethodDescriptor.ofMethod(Integer.class, "valueOf", Integer.class, int.class),
resultHandle);
break;
case "J":
resultHandle = methodCreator.invokeStaticMethod(
MethodDescriptor.ofMethod(Long.class, "valueOf", Long.class, long.class),
resultHandle);
break;
}
return resultHandle;
}
private Set<DotName> getFieldTypeNames(ClassInfo entityClassInfo, Set<DotName> entityFieldTypeNames) {
if (entityFieldTypeNames.isEmpty()) {
entityClassInfo.fields().stream()
.filter(not(fieldInfo -> Modifier.isStatic(fieldInfo.flags())))
.filter(not(FieldInfo::isSynthetic))
.filter(not(fieldInfo -> fieldInfo.hasAnnotation(DotNames.JPA_TRANSIENT)))
.map(fieldInfo -> fieldInfo.type().name())
.forEach(entityFieldTypeNames::add);
// recurse until we reached Object
Type superClassType = entityClassInfo.superClassType();
if (superClassType != null && !superClassType.name().equals(DotNames.OBJECT)) {
var superEntityClassInfo = index.getClassByName(superClassType.name());
entityFieldTypeNames.addAll(getFieldTypeNames(superEntityClassInfo, new HashSet<>()));
}
}
return entityFieldTypeNames;
}
}
| CustomQueryMethodsAdder |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestSnapshotCommands.java | {
"start": 1452,
"end": 12373
} | class ____ {
private static Configuration conf;
private static MiniDFSCluster cluster;
private static DistributedFileSystem fs;
@BeforeAll
public static void clusterSetUp() throws IOException {
conf = new HdfsConfiguration();
conf.setInt(DFSConfigKeys.DFS_NAMENODE_SNAPSHOT_MAX_LIMIT, 3);
cluster = new MiniDFSCluster.Builder(conf).build();
cluster.waitActive();
fs = cluster.getFileSystem();
}
@AfterAll
public static void clusterShutdown() throws IOException{
if(fs != null){
fs.close();
}
if(cluster != null){
cluster.shutdown();
}
}
@BeforeEach
public void setUp() throws IOException {
fs.mkdirs(new Path("/sub1"));
fs.mkdirs(new Path("/Fully/QPath"));
fs.allowSnapshot(new Path("/sub1"));
fs.mkdirs(new Path("/sub1/sub1sub1"));
fs.mkdirs(new Path("/sub1/sub1sub2"));
}
@AfterEach
public void tearDown() throws IOException {
if (fs.exists(new Path("/sub1"))) {
if (fs.exists(new Path("/sub1/.snapshot"))) {
for (FileStatus st : fs.listStatus(new Path("/sub1/.snapshot"))) {
fs.deleteSnapshot(new Path("/sub1"), st.getPath().getName());
}
fs.disallowSnapshot(new Path("/sub1"));
}
fs.delete(new Path("/sub1"), true);
}
}
@Test
public void testAllowSnapshot() throws Exception {
// Idempotent test
DFSTestUtil.DFSAdminRun("-allowSnapshot /sub1", 0,
"Allowing snapshot " + "on /sub1 succeeded", conf);
// allow normal dir success
DFSTestUtil.FsShellRun("-mkdir /sub2", conf);
DFSTestUtil.DFSAdminRun("-allowSnapshot /sub2", 0,
"Allowing snapshot " + "on /sub2 succeeded", conf);
// allow non-exists dir failed
DFSTestUtil.DFSAdminRun("-allowSnapshot /sub3", -1, null, conf);
}
@Test
public void testCreateSnapshot() throws Exception {
// test createSnapshot
DFSTestUtil.FsShellRun("-createSnapshot /sub1 sn0", 0, "Created snapshot /sub1/.snapshot/sn0", conf);
DFSTestUtil.FsShellRun("-createSnapshot /sub1 sn0", 1, "there is already a snapshot with the same name \"sn0\"", conf);
DFSTestUtil.FsShellRun("-rmr /sub1/sub1sub2", conf);
DFSTestUtil.FsShellRun("-mkdir /sub1/sub1sub3", conf);
DFSTestUtil.FsShellRun("-createSnapshot /sub1 sn1", 0, "Created snapshot /sub1/.snapshot/sn1", conf);
// check snapshot contents
DFSTestUtil.FsShellRun("-ls /sub1", 0, "/sub1/sub1sub1", conf);
DFSTestUtil.FsShellRun("-ls /sub1", 0, "/sub1/sub1sub3", conf);
DFSTestUtil.FsShellRun("-ls /sub1/.snapshot", 0, "/sub1/.snapshot/sn0", conf);
DFSTestUtil.FsShellRun("-ls /sub1/.snapshot", 0, "/sub1/.snapshot/sn1", conf);
DFSTestUtil.FsShellRun("-ls /sub1/.snapshot/sn0", 0, "/sub1/.snapshot/sn0/sub1sub1", conf);
DFSTestUtil.FsShellRun("-ls /sub1/.snapshot/sn0", 0, "/sub1/.snapshot/sn0/sub1sub2", conf);
DFSTestUtil.FsShellRun("-ls /sub1/.snapshot/sn1", 0, "/sub1/.snapshot/sn1/sub1sub1", conf);
DFSTestUtil.FsShellRun("-ls /sub1/.snapshot/sn1", 0, "/sub1/.snapshot/sn1/sub1sub3", conf);
}
@Test
public void testMaxSnapshotLimit() throws Exception {
DFSTestUtil.FsShellRun("-mkdir /sub3", conf);
DFSTestUtil.DFSAdminRun("-allowSnapshot /sub3", 0,
"Allowing snapshot " + "on /sub3 succeeded", conf);
// test createSnapshot
DFSTestUtil.FsShellRun("-createSnapshot /sub3 sn0", 0,
"Created snapshot /sub3/.snapshot/sn0", conf);
DFSTestUtil.FsShellRun("-createSnapshot /sub3 sn1", 0,
"Created snapshot /sub3/.snapshot/sn1", conf);
DFSTestUtil.FsShellRun("-createSnapshot /sub3 sn2", 0,
"Created snapshot /sub3/.snapshot/sn2", conf);
DFSTestUtil.FsShellRun("-createSnapshot /sub3 sn3", 1,
"Failed to create snapshot: there are already 3 snapshot(s) and "
+ "the per directory snapshot limit is 3", conf);
}
@Test
public void testMkdirUsingReservedName() throws Exception {
// test can not create dir with reserved name: .snapshot
DFSTestUtil.FsShellRun("-ls /", conf);
DFSTestUtil.FsShellRun("-mkdir /.snapshot", 1, "File exists", conf);
DFSTestUtil.FsShellRun("-mkdir /sub1/.snapshot", 1, "File exists", conf);
// mkdir -p ignore reserved name check if dir already exists
DFSTestUtil.FsShellRun("-mkdir -p /sub1/.snapshot", conf);
DFSTestUtil.FsShellRun("-mkdir -p /sub1/sub1sub1/.snapshot", 1, "mkdir: \".snapshot\" is a reserved name.", conf);
}
@Test
public void testRenameSnapshot() throws Exception {
DFSTestUtil.FsShellRun("-createSnapshot /sub1 sn.orig", conf);
DFSTestUtil.FsShellRun("-renameSnapshot /sub1 sn.orig sn.rename", conf);
DFSTestUtil.FsShellRun("-ls /sub1/.snapshot", 0, "/sub1/.snapshot/sn.rename", conf);
DFSTestUtil.FsShellRun("-ls /sub1/.snapshot/sn.rename", 0, "/sub1/.snapshot/sn.rename/sub1sub1", conf);
DFSTestUtil.FsShellRun("-ls /sub1/.snapshot/sn.rename", 0, "/sub1/.snapshot/sn.rename/sub1sub2", conf);
//try renaming from a non-existing snapshot
DFSTestUtil.FsShellRun("-renameSnapshot /sub1 sn.nonexist sn.rename", 1,
"renameSnapshot: The snapshot sn.nonexist does not exist for directory /sub1", conf);
//try renaming a non-existing snapshot to itself
DFSTestUtil.FsShellRun("-renameSnapshot /sub1 sn.nonexist sn.nonexist", 1,
"renameSnapshot: The snapshot sn.nonexist " +
"does not exist for directory /sub1", conf);
//try renaming to existing snapshots
DFSTestUtil.FsShellRun("-createSnapshot /sub1 sn.new", conf);
DFSTestUtil.FsShellRun("-renameSnapshot /sub1 sn.new sn.rename", 1,
"renameSnapshot: The snapshot sn.rename already exists for directory /sub1", conf);
DFSTestUtil.FsShellRun("-renameSnapshot /sub1 sn.rename sn.new", 1,
"renameSnapshot: The snapshot sn.new already exists for directory /sub1", conf);
}
@Test
public void testDeleteSnapshot() throws Exception {
DFSTestUtil.FsShellRun("-createSnapshot /sub1 sn1", conf);
DFSTestUtil.FsShellRun("-deleteSnapshot /sub1 sn1", conf);
DFSTestUtil.FsShellRun("-deleteSnapshot /sub1 sn1", 1,
"deleteSnapshot: Cannot delete snapshot sn1 from path /sub1: the snapshot does not exist.", conf);
}
@Test
public void testDisallowSnapshot() throws Exception {
DFSTestUtil.FsShellRun("-createSnapshot /sub1 sn1", conf);
// cannot delete snapshotable dir
DFSTestUtil.FsShellRun("-rmr /sub1", 1, "The directory /sub1 cannot be deleted since /sub1 is snapshottable and already has snapshots", conf);
DFSTestUtil.DFSAdminRun("-disallowSnapshot /sub1", -1,
"disallowSnapshot: The directory /sub1 has snapshot(s). Please redo the operation after removing all the snapshots.", conf);
DFSTestUtil.FsShellRun("-deleteSnapshot /sub1 sn1", conf);
DFSTestUtil.DFSAdminRun("-disallowSnapshot /sub1", 0,
"Disallowing snapshot on /sub1 succeeded", conf);
// Idempotent test
DFSTestUtil.DFSAdminRun("-disallowSnapshot /sub1", 0,
"Disallowing snapshot on /sub1 succeeded", conf);
// now it can be deleted
DFSTestUtil.FsShellRun("-rmr /sub1", conf);
}
@Test
@Timeout(value = 60)
public void testSnapshotCommandsWithURI()throws Exception {
Configuration config = new HdfsConfiguration();
//fs.defaultFS should not be used, when path is fully qualified.
config.set("fs.defaultFS", "hdfs://127.0.0.1:1024");
String path = fs.getUri() + "/Fully/QPath";
DFSTestUtil.DFSAdminRun("-allowSnapshot " + path, 0,
"Allowing snapshot on " + path + " succeeded", config);
DFSTestUtil.FsShellRun("-createSnapshot " + path + " sn1", config);
// create file1
DFSTestUtil
.createFile(fs, new Path(fs.getUri() + "/Fully/QPath/File1"), 1024,
(short) 1, 100);
// create file2
DFSTestUtil
.createFile(fs, new Path(fs.getUri() + "/Fully/QPath/File2"), 1024,
(short) 1, 100);
DFSTestUtil.FsShellRun("-createSnapshot " + path + " sn2", config);
// verify the snapshotdiff using api and command line
SnapshotDiffReport report =
fs.getSnapshotDiffReport(new Path(path), "sn1", "sn2");
DFSTestUtil.toolRun(new SnapshotDiff(config), path + " sn1 sn2", 0,
report.toString());
DFSTestUtil.FsShellRun("-renameSnapshot " + path + " sn2 sn3", config);
DFSTestUtil.FsShellRun("-deleteSnapshot " + path + " sn1", config);
DFSTestUtil.FsShellRun("-deleteSnapshot " + path + " sn3", config);
DFSTestUtil.DFSAdminRun("-disallowSnapshot " + path, 0,
"Disallowing snapshot on " + path + " succeeded", config);
fs.delete(new Path("/Fully/QPath"), true);
}
@Test
@Timeout(value = 120)
public void testSnapshotDiff()throws Exception {
Configuration config = new HdfsConfiguration();
Path snapDirPath = new Path(fs.getUri().toString() + "/snap_dir");
String snapDir = snapDirPath.toString();
fs.mkdirs(snapDirPath);
DFSTestUtil.DFSAdminRun("-allowSnapshot " + snapDirPath, 0,
"Allowing snapshot on " + snapDirPath + " succeeded", config);
DFSTestUtil.createFile(fs, new Path(snapDirPath, "file1"),
1024, (short) 1, 100);
DFSTestUtil.FsShellRun("-createSnapshot " + snapDirPath + " sn1", config);
DFSTestUtil.createFile(fs, new Path(snapDirPath, "file2"),
1024, (short) 1, 100);
DFSTestUtil.createFile(fs, new Path(snapDirPath, "file3"),
1024, (short) 1, 100);
DFSTestUtil.FsShellRun("-createSnapshot " + snapDirPath + " sn2", config);
// verify the snapshot diff using api and command line
SnapshotDiffReport report_s1_s2 =
fs.getSnapshotDiffReport(snapDirPath, "sn1", "sn2");
DFSTestUtil.toolRun(new SnapshotDiff(config), snapDir +
" sn1 sn2", 0, report_s1_s2.toString());
DFSTestUtil.FsShellRun("-renameSnapshot " + snapDirPath + " sn2 sn3",
config);
SnapshotDiffReport report_s1_s3 =
fs.getSnapshotDiffReport(snapDirPath, "sn1", "sn3");
DFSTestUtil.toolRun(new SnapshotDiff(config), snapDir +
" sn1 sn3", 0, report_s1_s3.toString());
// Creating 100 more files so as to force DiffReport generation
// backend ChunkedArrayList to create multiple chunks.
for (int i = 0; i < 100; i++) {
DFSTestUtil.createFile(fs, new Path(snapDirPath, "file_" + i),
1, (short) 1, 100);
}
DFSTestUtil.FsShellRun("-createSnapshot " + snapDirPath + " sn4", config);
DFSTestUtil.toolRun(new SnapshotDiff(config), snapDir +
" sn1 sn4", 0, null);
DFSTestUtil.FsShellRun("-deleteSnapshot " + snapDir + " sn1", config);
DFSTestUtil.FsShellRun("-deleteSnapshot " + snapDir + " sn3", config);
DFSTestUtil.FsShellRun("-deleteSnapshot " + snapDir + " sn4", config);
DFSTestUtil.DFSAdminRun("-disallowSnapshot " + snapDir, 0,
"Disallowing snapshot on " + snapDirPath + " succeeded", config);
fs.delete(new Path("/Fully/QPath"), true);
}
}
| TestSnapshotCommands |
java | spring-projects__spring-security | core/src/main/java/org/springframework/security/jackson/SecurityJacksonModules.java | {
"start": 1634,
"end": 2385
} | class ____.
*
* <p>
* <pre>
* ClassLoader loader = getClass().getClassLoader();
* JsonMapper mapper = JsonMapper.builder()
* .addModules(SecurityJacksonModules.getModules(loader))
* .build();
* </pre>
*
* If needed, you can add custom classes to the validation handling.
* <p>
* <pre>
* ClassLoader loader = getClass().getClassLoader();
* BasicPolymorphicTypeValidator.Builder builder = BasicPolymorphicTypeValidator.builder()
* .allowIfSubType(MyCustomType.class);
* JsonMapper mapper = JsonMapper.builder()
* .addModules(SecurityJacksonModules.getModules(loader, builder))
* .build();
* </pre>
*
* @author Sebastien Deleuze
* @author Jitendra Singh
* @since 7.0
*/
public final | names |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/type/descriptor/java/ZonedDateTimeJavaType.java | {
"start": 1032,
"end": 6873
} | class ____ extends AbstractTemporalJavaType<ZonedDateTime> implements VersionJavaType<ZonedDateTime> {
/**
* Singleton access
*/
public static final ZonedDateTimeJavaType INSTANCE = new ZonedDateTimeJavaType();
public ZonedDateTimeJavaType() {
super( ZonedDateTime.class, ImmutableMutabilityPlan.instance(), ZonedDateTimeComparator.INSTANCE );
}
@Override
public boolean isInstance(Object value) {
return value instanceof ZonedDateTime;
}
@Override
public TemporalType getPrecision() {
return TemporalType.TIMESTAMP;
}
@Override
public JdbcType getRecommendedJdbcType(JdbcTypeIndicators stdIndicators) {
if ( stdIndicators.isPreferJavaTimeJdbcTypesEnabled() ) {
return stdIndicators.getJdbcType( SqlTypes.ZONED_DATE_TIME );
}
return stdIndicators.getJdbcType( stdIndicators.getDefaultZonedTimestampSqlType() );
}
@Override @SuppressWarnings("unchecked")
protected <X> TemporalJavaType<X> forTimestampPrecision(TypeConfiguration typeConfiguration) {
return (TemporalJavaType<X>) this;
}
@Override
public boolean useObjectEqualsHashCode() {
return true;
}
@Override
public String toString(ZonedDateTime value) {
return ISO_ZONED_DATE_TIME.format( value );
}
@Override
public ZonedDateTime fromString(CharSequence string) {
return ZonedDateTime.from( ISO_ZONED_DATE_TIME.parse( string ) );
}
@Override
@SuppressWarnings("unchecked")
public <X> X unwrap(ZonedDateTime zonedDateTime, Class<X> type, WrapperOptions options) {
if ( zonedDateTime == null ) {
return null;
}
if ( ZonedDateTime.class.isAssignableFrom( type ) ) {
return (X) zonedDateTime;
}
if ( OffsetDateTime.class.isAssignableFrom( type ) ) {
return (X) OffsetDateTime.of( zonedDateTime.toLocalDateTime(), zonedDateTime.getOffset() );
}
if ( Instant.class.isAssignableFrom( type ) ) {
return (X) zonedDateTime.toInstant();
}
if ( Calendar.class.isAssignableFrom( type ) ) {
return (X) GregorianCalendar.from( zonedDateTime );
}
if ( Timestamp.class.isAssignableFrom( type ) ) {
/*
* This works around two bugs:
* - HHH-13266 (JDK-8061577): around and before 1900,
* the number of milliseconds since the epoch does not mean the same thing
* for java.util and java.time, so conversion must be done using the year, month, day, hour, etc.
* - HHH-13379 (JDK-4312621): after 1908 (approximately),
* Daylight Saving Time introduces ambiguity in the year/month/day/hour/etc representation once a year
* (on DST end), so conversion must be done using the number of milliseconds since the epoch.
* - around 1905, both methods are equally valid, so we don't really care which one is used.
*/
if ( zonedDateTime.getYear() < 1905 ) {
return (X) Timestamp.valueOf(
zonedDateTime.withZoneSameInstant( ZoneId.systemDefault() ).toLocalDateTime()
);
}
else {
return (X) Timestamp.from( zonedDateTime.toInstant() );
}
}
if ( java.sql.Date.class.isAssignableFrom( type ) ) {
return (X) java.sql.Date.from( zonedDateTime.toInstant() );
}
if ( java.sql.Time.class.isAssignableFrom( type ) ) {
return (X) java.sql.Time.from( zonedDateTime.toInstant() );
}
if ( Date.class.isAssignableFrom( type ) ) {
return (X) Date.from( zonedDateTime.toInstant() );
}
if ( Long.class.isAssignableFrom( type ) ) {
return (X) Long.valueOf( zonedDateTime.toInstant().toEpochMilli() );
}
throw unknownUnwrap( type );
}
@Override
public <X> ZonedDateTime wrap(X value, WrapperOptions options) {
if ( value == null ) {
return null;
}
if (value instanceof ZonedDateTime zonedDateTime) {
return zonedDateTime;
}
if (value instanceof OffsetDateTime offsetDateTime) {
return offsetDateTime.toZonedDateTime();
}
if (value instanceof Instant instant) {
return instant.atZone( ZoneOffset.UTC );
}
if (value instanceof Timestamp timestamp) {
/*
* This works around two bugs:
* - HHH-13266 (JDK-8061577): around and before 1900,
* the number of milliseconds since the epoch does not mean the same thing
* for java.util and java.time, so conversion must be done using the year, month, day, hour, etc.
* - HHH-13379 (JDK-4312621): after 1908 (approximately),
* Daylight Saving Time introduces ambiguity in the year/month/day/hour/etc representation once a year
* (on DST end), so conversion must be done using the number of milliseconds since the epoch.
* - around 1905, both methods are equally valid, so we don't really care which one is used.
*/
if ( timestamp.getYear() < 5 ) { // Timestamp year 0 is 1900
return timestamp.toLocalDateTime().atZone( ZoneId.systemDefault() );
}
else {
return timestamp.toInstant().atZone( ZoneId.systemDefault() );
}
}
if (value instanceof Date date) {
return ZonedDateTime.ofInstant( date.toInstant(), ZoneId.systemDefault() );
}
if (value instanceof Long longValue) {
return ZonedDateTime.ofInstant( Instant.ofEpochMilli( longValue ), ZoneId.systemDefault() );
}
if (value instanceof Calendar calendar) {
return ZonedDateTime.ofInstant( calendar.toInstant(), calendar.getTimeZone().toZoneId() );
}
throw unknownWrap( value.getClass() );
}
@Override
public int getDefaultSqlPrecision(Dialect dialect, JdbcType jdbcType) {
return dialect.getDefaultTimestampPrecision();
}
@Override
public ZonedDateTime seed(Long length, Integer precision, Integer scale, SharedSessionContractImplementor session) {
return ZonedDateTime.now( ClockHelper.forPrecision( precision, session ) );
}
@Override
public ZonedDateTime next(
ZonedDateTime current,
Long length,
Integer precision,
Integer scale,
SharedSessionContractImplementor session) {
return ZonedDateTime.now( ClockHelper.forPrecision( precision, session ) );
}
}
| ZonedDateTimeJavaType |
java | apache__dubbo | dubbo-config/dubbo-config-api/src/test/java/org/apache/dubbo/config/integration/single/SingleRegistryCenterIntegrationService.java | {
"start": 941,
"end": 1026
} | interface ____ {
String hello(String name);
}
| SingleRegistryCenterIntegrationService |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/RoutingInfo.java | {
"start": 950,
"end": 6875
} | class ____ implements ToXContentObject, Writeable {
private static final ParseField CURRENT_ALLOCATIONS = new ParseField("current_allocations");
private static final ParseField TARGET_ALLOCATIONS = new ParseField("target_allocations");
private static final ParseField ROUTING_STATE = new ParseField("routing_state");
private static final ParseField REASON = new ParseField("reason");
private static final ConstructingObjectParser<RoutingInfo, Void> PARSER = new ConstructingObjectParser<>(
"trained_model_routing_state",
a -> new RoutingInfo((Integer) a[0], (Integer) a[1], RoutingState.fromString((String) a[2]), (String) a[3])
);
static {
PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), CURRENT_ALLOCATIONS);
PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), TARGET_ALLOCATIONS);
PARSER.declareString(ConstructingObjectParser.constructorArg(), ROUTING_STATE);
PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), REASON);
}
public static RoutingInfo fromXContent(XContentParser parser) {
return PARSER.apply(parser, null);
}
private final int currentAllocations;
private final int targetAllocations;
private final RoutingState state;
private final String reason;
// There may be objects in cluster state prior to 8.4 that do not contain values for currentAllocations and targetAllocations.
private RoutingInfo(
@Nullable Integer currentAllocations,
@Nullable Integer targetAllocations,
RoutingState state,
@Nullable String reason
) {
this(currentAllocations == null ? 0 : currentAllocations, targetAllocations == null ? 0 : targetAllocations, state, reason);
}
/**
* RoutingInfo defines the state of a particular trained model assignment on a particular node.
* @param currentAllocations The number of allocations currently running on a node.
* @param targetAllocations The number of allocations that have been assigned to a node, and will run on the node. Should never be
* higher than the number of available processors on the node.
* @param state Indicates the availability of the allocations on the node.
* @param reason Will contain the reason that currentAllocations != targetAllocations, if applicable, otherwise empty string.
*/
public RoutingInfo(int currentAllocations, int targetAllocations, RoutingState state, String reason) {
this.currentAllocations = currentAllocations;
this.targetAllocations = targetAllocations;
this.state = ExceptionsHelper.requireNonNull(state, ROUTING_STATE);
this.reason = reason;
}
public RoutingInfo(StreamInput in) throws IOException {
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_4_0)) {
this.currentAllocations = in.readVInt();
this.targetAllocations = in.readVInt();
} else {
this.currentAllocations = 0;
this.targetAllocations = 0;
}
this.state = in.readEnum(RoutingState.class);
this.reason = in.readOptionalString();
}
/**
* @return The number of allocations currently running on a node.
*/
public int getCurrentAllocations() {
return currentAllocations;
}
/**
* @return The number of allocations that have been assigned to a node, and will run on the node. Should never be
* higher than the number of available processors on the node.
*/
public int getTargetAllocations() {
return targetAllocations;
}
public int getFailedAllocations() {
return state == RoutingState.FAILED ? targetAllocations : 0;
}
public RoutingState getState() {
return state;
}
@Nullable
public String getReason() {
return reason;
}
public boolean isOutdated() {
return currentAllocations == 0 && targetAllocations == 0;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_4_0)) {
out.writeVInt(currentAllocations);
out.writeVInt(targetAllocations);
}
out.writeEnum(state);
out.writeOptionalString(reason);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(CURRENT_ALLOCATIONS.getPreferredName(), currentAllocations);
builder.field(TARGET_ALLOCATIONS.getPreferredName(), targetAllocations);
builder.field(ROUTING_STATE.getPreferredName(), state);
if (reason != null) {
builder.field(REASON.getPreferredName(), reason);
}
builder.endObject();
return builder;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
RoutingInfo that = (RoutingInfo) o;
return currentAllocations == that.currentAllocations
&& targetAllocations == that.targetAllocations
&& state == that.state
&& Objects.equals(reason, that.reason);
}
@Override
public int hashCode() {
return Objects.hash(currentAllocations, targetAllocations, state, reason);
}
@Override
public String toString() {
return "RoutingInfo{"
+ "current_allocations="
+ currentAllocations
+ ", target_allocations="
+ targetAllocations
+ ", reason='"
+ reason
+ '\''
+ ", state="
+ state
+ '}';
}
public boolean isRoutable() {
return state == RoutingState.STARTED && currentAllocations > 0;
}
}
| RoutingInfo |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/flogger/FloggerStringConcatenationTest.java | {
"start": 2015,
"end": 2703
} | class ____ {
private static final FluentLogger logger = FluentLogger.forEnclosingClass();
private static final String CONSTANT = "constant";
public void method(String world, int i, long l, float f, double d, boolean b) {
logger.atInfo().log("hello %s%d%d%g%g%s%s", world, i, l, f, d, b, CONSTANT);
}
}
""")
.doTest();
}
@Test
public void constant() {
CompilationTestHelper.newInstance(FloggerStringConcatenation.class, getClass())
.addSourceLines(
"in/Test.java",
"""
import com.google.common.flogger.FluentLogger;
| Test |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmnode/RMNodeImpl.java | {
"start": 37707,
"end": 41395
} | class ____ implements
MultipleArcTransition<RMNodeImpl, RMNodeEvent, NodeState> {
@Override
public NodeState transition(RMNodeImpl rmNode, RMNodeEvent event) {
RMNodeReconnectEvent reconnectEvent = (RMNodeReconnectEvent) event;
RMNode newNode = reconnectEvent.getReconnectedNode();
rmNode.nodeManagerVersion = newNode.getNodeManagerVersion();
List<ApplicationId> runningApps = reconnectEvent.getRunningApplications();
boolean noRunningApps =
(runningApps == null) || (runningApps.size() == 0);
// No application running on the node, so send node-removal event with
// cleaning up old container info.
if (noRunningApps) {
if (rmNode.getState() == NodeState.DECOMMISSIONING) {
// When node in decommissioning, and no running apps on this node,
// it will return as decommissioned state.
deactivateNode(rmNode, NodeState.DECOMMISSIONED);
return NodeState.DECOMMISSIONED;
}
rmNode.nodeUpdateQueue.clear();
rmNode.context.getDispatcher().getEventHandler().handle(
new NodeRemovedSchedulerEvent(rmNode));
if (rmNode.getHttpPort() == newNode.getHttpPort()) {
if (!rmNode.getTotalCapability().equals(
newNode.getTotalCapability())) {
rmNode.totalCapability = newNode.getTotalCapability();
}
if (rmNode.getState().equals(NodeState.RUNNING)) {
// Only add old node if old state is RUNNING
rmNode.context.getDispatcher().getEventHandler().handle(
new NodeAddedSchedulerEvent(rmNode));
}
}
} else {
rmNode.httpPort = newNode.getHttpPort();
rmNode.httpAddress = newNode.getHttpAddress();
boolean isCapabilityChanged = false;
if (!rmNode.getTotalCapability().equals(
newNode.getTotalCapability())) {
rmNode.totalCapability = newNode.getTotalCapability();
isCapabilityChanged = true;
}
handleNMContainerStatus(reconnectEvent.getNMContainerStatuses(), rmNode);
for (ApplicationId appId : reconnectEvent.getRunningApplications()) {
handleRunningAppOnNode(rmNode, rmNode.context, appId, rmNode.nodeId);
}
if (isCapabilityChanged
&& rmNode.getState().equals(NodeState.RUNNING)) {
// Update scheduler node's capacity for reconnect node.
rmNode.context
.getDispatcher()
.getEventHandler()
.handle(
new NodeResourceUpdateSchedulerEvent(rmNode, ResourceOption
.newInstance(newNode.getTotalCapability(), -1)));
}
}
return rmNode.getState();
}
private void handleNMContainerStatus(
List<NMContainerStatus> nmContainerStatuses, RMNodeImpl rmnode) {
if (nmContainerStatuses != null) {
List<ContainerStatus> containerStatuses =
new ArrayList<ContainerStatus>();
for (NMContainerStatus nmContainerStatus : nmContainerStatuses) {
containerStatuses.add(createContainerStatus(nmContainerStatus));
}
rmnode.handleContainerStatus(containerStatuses);
}
}
private ContainerStatus createContainerStatus(
NMContainerStatus remoteContainer) {
ContainerStatus cStatus =
ContainerStatus.newInstance(remoteContainer.getContainerId(),
remoteContainer.getContainerState(),
remoteContainer.getDiagnostics(),
remoteContainer.getContainerExitStatus());
return cStatus;
}
}
public static | ReconnectNodeTransition |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/impl/FromHasNoOutputRouteTest.java | {
"start": 1143,
"end": 2162
} | class ____ extends ContextTestSupport {
@Override
public boolean isUseRouteBuilder() {
return false;
}
@Test
public void testFromHasNoOutputRoute() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
// has no output which is a mis configuration
from("direct:start");
}
});
Exception e = assertThrows(Exception.class, () -> context.start(), "Should throw exception");
FailedToCreateRouteException failed = assertIsInstanceOf(FailedToCreateRouteException.class, e);
assertTrue(failed.getRouteId().matches("route[0-9]+"));
IllegalArgumentException cause = assertIsInstanceOf(IllegalArgumentException.class, e.getCause());
assertTrue(cause.getMessage().matches(
"Route route[0-9]+\\Q has no output processors. You need to add outputs to the route such as to(\"log:foo\").\\E"));
}
}
| FromHasNoOutputRouteTest |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/propertyeditors/TimeZoneEditor.java | {
"start": 1114,
"end": 1501
} | class ____ extends PropertyEditorSupport {
@Override
public void setAsText(String text) throws IllegalArgumentException {
if (StringUtils.hasText(text)) {
text = text.trim();
}
setValue(StringUtils.parseTimeZoneString(text));
}
@Override
public String getAsText() {
TimeZone value = (TimeZone) getValue();
return (value != null ? value.getID() : "");
}
}
| TimeZoneEditor |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/template/SourceTargetMapperSingle.java | {
"start": 445,
"end": 1102
} | interface ____ {
SourceTargetMapperSingle INSTANCE = Mappers.getMapper( SourceTargetMapperSingle.class );
@Mappings({
@Mapping(target = "stringPropY", source = "stringPropX" ),
@Mapping(target = "integerPropY", source = "integerPropX" ),
@Mapping(target = "nestedResultProp", source = "nestedSourceProp.nested"),
@Mapping(target = "constantProp", constant = "constant"),
@Mapping(target = "expressionProp", expression = "java(\"expression\")"),
})
Target forwardCreate(Source s1);
@InheritConfiguration
void forwardUpdate(Source source, @MappingTarget Target target);
}
| SourceTargetMapperSingle |
java | quarkusio__quarkus | integration-tests/maven/src/test/resources-filtered/expected/create-extension-pom-add-to-bom/add-to-bom/deployment/src/test/java/org/acme/my/project/add/to/bom/test/AddToBomDevModeTest.java | {
"start": 326,
"end": 823
} | class ____ {
@RegisterExtension
static final QuarkusDevModeTest devModeTest = new QuarkusDevModeTest() // Start hot reload (DevMode) test with your extension loaded
.withEmptyApplication();
@Test
public void test() {
// Write your tests here - see the testing extension guide https://quarkus.io/guides/writing-extensions#testing-hot-reload for more information
Assertions.fail("Add dev mode assertions to " + getClass().getName());
}
}
| AddToBomDevModeTest |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/action/OpenAiActionCreator.java | {
"start": 2169,
"end": 4515
} | class ____ implements OpenAiActionVisitor {
public static final String COMPLETION_ERROR_PREFIX = "OpenAI chat completions";
public static final String USER_ROLE = "user";
static final ResponseHandler COMPLETION_HANDLER = new OpenAiChatCompletionResponseHandler(
"openai completion",
OpenAiChatCompletionResponseEntity::fromResponse
);
public static final ResponseHandler EMBEDDINGS_HANDLER = new OpenAiResponseHandler(
"openai text embedding",
OpenAiEmbeddingsResponseEntity::fromResponse,
false
);
private final Sender sender;
private final ServiceComponents serviceComponents;
public OpenAiActionCreator(Sender sender, ServiceComponents serviceComponents) {
this.sender = Objects.requireNonNull(sender);
this.serviceComponents = Objects.requireNonNull(serviceComponents);
}
@Override
public ExecutableAction create(OpenAiEmbeddingsModel model, Map<String, Object> taskSettings) {
var overriddenModel = OpenAiEmbeddingsModel.of(model, taskSettings);
var manager = new TruncatingRequestManager(
serviceComponents.threadPool(),
overriddenModel,
EMBEDDINGS_HANDLER,
(truncationResult) -> new OpenAiEmbeddingsRequest(serviceComponents.truncator(), truncationResult, overriddenModel),
overriddenModel.getServiceSettings().maxInputTokens()
);
var errorMessage = constructFailedToSendRequestMessage("OpenAI embeddings");
return new SenderExecutableAction(sender, manager, errorMessage);
}
@Override
public ExecutableAction create(OpenAiChatCompletionModel model, Map<String, Object> taskSettings) {
var overriddenModel = OpenAiChatCompletionModel.of(model, taskSettings);
var manager = new GenericRequestManager<>(
serviceComponents.threadPool(),
overriddenModel,
COMPLETION_HANDLER,
(inputs) -> new OpenAiUnifiedChatCompletionRequest(new UnifiedChatInput(inputs, USER_ROLE), overriddenModel),
ChatCompletionInput.class
);
var errorMessage = constructFailedToSendRequestMessage(COMPLETION_ERROR_PREFIX);
return new SingleInputSenderExecutableAction(sender, manager, errorMessage, COMPLETION_ERROR_PREFIX);
}
}
| OpenAiActionCreator |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopLongAggregator.java | {
"start": 2862,
"end": 3932
} | class ____ implements GroupingAggregatorState {
private final LongBucketedSort sort;
private GroupingState(BigArrays bigArrays, int limit, boolean ascending) {
this.sort = new LongBucketedSort(bigArrays, ascending ? SortOrder.ASC : SortOrder.DESC, limit);
}
public void add(int groupId, long value) {
sort.collect(value, groupId);
}
@Override
public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) {
blocks[offset] = toBlock(driverContext.blockFactory(), selected);
}
Block toBlock(BlockFactory blockFactory, IntVector selected) {
return sort.toBlock(blockFactory, selected);
}
@Override
public void enableGroupIdTracking(SeenGroupIds seen) {
// we figure out seen values from nulls on the values block
}
@Override
public void close() {
Releasables.closeExpectNoException(sort);
}
}
public static | GroupingState |
java | quarkusio__quarkus | extensions/qute/runtime/src/main/java/io/quarkus/qute/i18n/MessageBundle.java | {
"start": 560,
"end": 1046
} | interface ____ {
/**
* Constant value for {@link #locale()} indicating that the default locale specified via the {@code quarkus.default-locale}
* config property should be used.
*/
String DEFAULT_LOCALE = "<<default locale>>";
/**
* Constant value for {@link #value()}.
*/
String DEFAULT_NAME = "msg";
/**
* Constant value for {@link #value()} indicating that the name should be defaulted.
* <p>
* For a top-level | MessageBundle |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/locking/OptimisticLockTypeAllTest.java | {
"start": 754,
"end": 1682
} | class ____ {
@AfterEach
void tearDown(EntityManagerFactoryScope factoryScope) {
factoryScope.dropData();
}
@Test
public void test(EntityManagerFactoryScope factoryScope) {
factoryScope.inTransaction( entityManager -> {
var person = new Person();
person.setId(1L);
person.setName("John Doe");
person.setCountry("US");
person.setCity("New York");
person.setCreatedOn(new Timestamp(System.currentTimeMillis()));
entityManager.persist(person);
});
factoryScope.inTransaction( entityManager -> {
//tag::locking-optimistic-lock-type-all-update-example[]
var person = entityManager.find(Person.class, 1L);
person.setCity("Washington D.C.");
//end::locking-optimistic-lock-type-all-update-example[]
});
}
//tag::locking-optimistic-lock-type-all-example[]
@Entity(name = "Person")
@OptimisticLocking(type = OptimisticLockType.ALL)
@DynamicUpdate
public static | OptimisticLockTypeAllTest |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfo.java | {
"start": 1724,
"end": 14871
} | class ____ extends Block
implements LightWeightGSet.LinkedElement {
public static final BlockInfo[] EMPTY_ARRAY = {};
/**
* Replication factor.
*/
private short replication;
/**
* Block collection ID.
*/
private volatile long bcId;
/** For implementing {@link LightWeightGSet.LinkedElement} interface. */
private LightWeightGSet.LinkedElement nextLinkedElement;
/**
* This array contains triplets of references. For each i-th storage, the
* block belongs to triplets[3*i] is the reference to the
* {@link DatanodeStorageInfo} and triplets[3*i+1] and triplets[3*i+2] are
* references to the previous and the next blocks, respectively, in the list
* of blocks belonging to this storage.
*
* Using previous and next in Object triplets is done instead of a
* {@link LinkedList} list to efficiently use memory. With LinkedList the cost
* per replica is 42 bytes (LinkedList#Entry object per replica) versus 16
* bytes using the triplets.
*/
protected Object[] triplets;
private BlockUnderConstructionFeature uc;
/**
* Construct an entry for blocksmap
* @param size the block's replication factor, or the total number of blocks
* in the block group
*/
public BlockInfo(short size) {
this.triplets = new Object[3 * size];
this.bcId = INVALID_INODE_ID;
this.replication = isStriped() ? 0 : size;
}
public BlockInfo(Block blk, short size) {
super(blk);
this.triplets = new Object[3 * size];
this.bcId = INVALID_INODE_ID;
this.replication = isStriped() ? 0 : size;
}
public short getReplication() {
return replication;
}
public void setReplication(short repl) {
this.replication = repl;
}
public long getBlockCollectionId() {
return bcId;
}
public void setBlockCollectionId(long id) {
this.bcId = id;
}
public void delete() {
setBlockCollectionId(INVALID_INODE_ID);
}
public boolean isDeleted() {
return bcId == INVALID_INODE_ID;
}
public Iterator<DatanodeStorageInfo> getStorageInfos() {
return new BlocksMap.StorageIterator(this);
}
public DatanodeDescriptor getDatanode(int index) {
DatanodeStorageInfo storage = getStorageInfo(index);
return storage == null ? null : storage.getDatanodeDescriptor();
}
DatanodeStorageInfo getStorageInfo(int index) {
assert this.triplets != null : "BlockInfo is not initialized";
assert index >= 0 && index * 3 < triplets.length : "Index is out of bound";
return (DatanodeStorageInfo)triplets[index * 3];
}
BlockInfo getPrevious(int index) {
assert this.triplets != null : "BlockInfo is not initialized";
assert index >= 0 && index * 3 + 1 < triplets.length : "Index is out of bound";
BlockInfo info = (BlockInfo)triplets[index * 3 + 1];
assert info == null ||
info.getClass().getName().startsWith(BlockInfo.class.getName()) :
"BlockInfo is expected at " + (index * 3 + 1);
return info;
}
BlockInfo getNext(int index) {
assert this.triplets != null : "BlockInfo is not initialized";
assert index >= 0 && index * 3 + 2 < triplets.length : "Index is out of bound";
BlockInfo info = (BlockInfo)triplets[index * 3 + 2];
assert info == null || info.getClass().getName().startsWith(
BlockInfo.class.getName()) :
"BlockInfo is expected at " + (index * 3 + 2);
return info;
}
void setStorageInfo(int index, DatanodeStorageInfo storage) {
assert this.triplets != null : "BlockInfo is not initialized";
assert index >= 0 && index * 3 < triplets.length : "Index is out of bound";
triplets[index * 3] = storage;
}
/**
* Return the previous block on the block list for the datanode at
* position index. Set the previous block on the list to "to".
*
* @param index - the datanode index
* @param to - block to be set to previous on the list of blocks
* @return current previous block on the list of blocks
*/
BlockInfo setPrevious(int index, BlockInfo to) {
assert this.triplets != null : "BlockInfo is not initialized";
assert index >= 0 && index * 3 + 1 < triplets.length : "Index is out of bound";
BlockInfo info = (BlockInfo) triplets[index * 3 + 1];
triplets[index * 3 + 1] = to;
return info;
}
/**
* Return the next block on the block list for the datanode at
* position index. Set the next block on the list to "to".
*
* @param index - the datanode index
* @param to - block to be set to next on the list of blocks
* @return current next block on the list of blocks
*/
BlockInfo setNext(int index, BlockInfo to) {
assert this.triplets != null : "BlockInfo is not initialized";
assert index >= 0 && index * 3 + 2 < triplets.length : "Index is out of bound";
BlockInfo info = (BlockInfo) triplets[index * 3 + 2];
triplets[index * 3 + 2] = to;
return info;
}
public int getCapacity() {
assert this.triplets != null : "BlockInfo is not initialized";
assert triplets.length % 3 == 0 : "Malformed BlockInfo";
return triplets.length / 3;
}
/**
* Count the number of data-nodes the block currently belongs to (i.e., NN
* has received block reports from the DN).
*/
public abstract int numNodes();
/**
* Add a {@link DatanodeStorageInfo} location for a block
* @param storage The storage to add
* @param reportedBlock The block reported from the datanode. This is only
* used by erasure coded blocks, this block's id contains
* information indicating the index of the block in the
* corresponding block group.
*/
abstract boolean addStorage(DatanodeStorageInfo storage, Block reportedBlock);
/**
* Remove {@link DatanodeStorageInfo} location for a block
*/
abstract boolean removeStorage(DatanodeStorageInfo storage);
public abstract boolean isStriped();
public abstract BlockType getBlockType();
/** @return true if there is no datanode storage associated with the block */
abstract boolean hasNoStorage();
/**
* Checks whether this block has a Provided replica.
* @return true if this block has a replica on Provided storage.
*/
abstract boolean isProvided();
/**
* Find specified DatanodeStorageInfo.
* @return DatanodeStorageInfo or null if not found.
*/
DatanodeStorageInfo findStorageInfo(DatanodeDescriptor dn) {
int len = getCapacity();
DatanodeStorageInfo providedStorageInfo = null;
for(int idx = 0; idx < len; idx++) {
DatanodeStorageInfo cur = getStorageInfo(idx);
if(cur != null) {
if (cur.getStorageType() == StorageType.PROVIDED) {
// if block resides on provided storage, only match the storage ids
if (dn.getStorageInfo(cur.getStorageID()) != null) {
// do not return here as we have to check the other
// DatanodeStorageInfos for this block which could be local
providedStorageInfo = cur;
}
} else if (cur.getDatanodeDescriptor() == dn) {
return cur;
}
}
}
return providedStorageInfo;
}
/**
* Find specified DatanodeStorageInfo.
* @return index or -1 if not found.
*/
int findStorageInfo(DatanodeStorageInfo storageInfo) {
int len = getCapacity();
for(int idx = 0; idx < len; idx++) {
DatanodeStorageInfo cur = getStorageInfo(idx);
if (cur == storageInfo) {
return idx;
}
}
return -1;
}
/**
* Insert this block into the head of the list of blocks
* related to the specified DatanodeStorageInfo.
* If the head is null then form a new list.
* @return current block as the new head of the list.
*/
BlockInfo listInsert(BlockInfo head, DatanodeStorageInfo storage) {
int dnIndex = this.findStorageInfo(storage);
assert dnIndex >= 0 : "Data node is not found: current";
assert getPrevious(dnIndex) == null && getNext(dnIndex) == null :
"Block is already in the list and cannot be inserted.";
this.setPrevious(dnIndex, null);
this.setNext(dnIndex, head);
if (head != null) {
head.setPrevious(head.findStorageInfo(storage), this);
}
return this;
}
/**
* Remove this block from the list of blocks
* related to the specified DatanodeStorageInfo.
* If this block is the head of the list then return the next block as
* the new head.
* @return the new head of the list or null if the list becomes
* empy after deletion.
*/
BlockInfo listRemove(BlockInfo head, DatanodeStorageInfo storage) {
if (head == null) {
return null;
}
int dnIndex = this.findStorageInfo(storage);
if (dnIndex < 0) { // this block is not on the data-node list
return head;
}
BlockInfo next = this.getNext(dnIndex);
BlockInfo prev = this.getPrevious(dnIndex);
this.setNext(dnIndex, null);
this.setPrevious(dnIndex, null);
if (prev != null) {
prev.setNext(prev.findStorageInfo(storage), next);
}
if (next != null) {
next.setPrevious(next.findStorageInfo(storage), prev);
}
if (this == head) { // removing the head
head = next;
}
return head;
}
/**
* Remove this block from the list of blocks related to the specified
* DatanodeDescriptor. Insert it into the head of the list of blocks.
*
* @return the new head of the list.
*/
public BlockInfo moveBlockToHead(BlockInfo head, DatanodeStorageInfo storage,
int curIndex, int headIndex) {
if (head == this) {
return this;
}
BlockInfo next = this.setNext(curIndex, head);
BlockInfo prev = this.setPrevious(curIndex, null);
head.setPrevious(headIndex, this);
prev.setNext(prev.findStorageInfo(storage), next);
if (next != null) {
next.setPrevious(next.findStorageInfo(storage), prev);
}
return this;
}
@Override
public int hashCode() {
// Super implementation is sufficient
return super.hashCode();
}
@Override
public boolean equals(Object obj) {
// Sufficient to rely on super's implementation
return (this == obj) || super.equals(obj);
}
@Override
public LightWeightGSet.LinkedElement getNext() {
return nextLinkedElement;
}
@Override
public void setNext(LightWeightGSet.LinkedElement next) {
this.nextLinkedElement = next;
}
/* UnderConstruction Feature related */
public BlockUnderConstructionFeature getUnderConstructionFeature() {
return uc;
}
public BlockUCState getBlockUCState() {
return uc == null ? BlockUCState.COMPLETE : uc.getBlockUCState();
}
/**
* Is this block complete?
*
* @return true if the state of the block is {@link BlockUCState#COMPLETE}
*/
public boolean isComplete() {
return getBlockUCState().equals(BlockUCState.COMPLETE);
}
public boolean isUnderRecovery() {
return getBlockUCState().equals(BlockUCState.UNDER_RECOVERY);
}
public final boolean isCompleteOrCommitted() {
final BlockUCState state = getBlockUCState();
return state.equals(BlockUCState.COMPLETE) ||
state.equals(BlockUCState.COMMITTED);
}
/**
* Add/Update the under construction feature.
*/
public void convertToBlockUnderConstruction(BlockUCState s,
DatanodeStorageInfo[] targets) {
if (isComplete()) {
uc = new BlockUnderConstructionFeature(this, s, targets,
this.getBlockType());
} else {
// the block is already under construction
uc.setBlockUCState(s);
uc.setExpectedLocations(this, targets, this.getBlockType());
}
}
/**
* Convert an under construction block to complete.
*/
void convertToCompleteBlock() {
assert getBlockUCState() != BlockUCState.COMPLETE :
"Trying to convert a COMPLETE block";
uc = null;
}
/**
* Process the recorded replicas. When about to commit or finish the
* pipeline recovery sort out bad replicas.
* @param genStamp The final generation stamp for the block.
* @return staleReplica's List.
*/
public List<ReplicaUnderConstruction> setGenerationStampAndVerifyReplicas(
long genStamp) {
Preconditions.checkState(uc != null && !isComplete());
// Set the generation stamp for the block.
setGenerationStamp(genStamp);
return uc.getStaleReplicas(genStamp);
}
/**
* Commit block's length and generation stamp as reported by the client.
* Set block state to {@link BlockUCState#COMMITTED}.
* @param block - contains client reported block length and generation
* @return staleReplica's List.
* @throws IOException if block ids are inconsistent.
*/
List<ReplicaUnderConstruction> commitBlock(Block block) throws IOException {
if (getBlockId() != block.getBlockId()) {
throw new IOException("Trying to commit inconsistent block: id = "
+ block.getBlockId() + ", expected id = " + getBlockId());
}
Preconditions.checkState(!isComplete());
uc.commit();
this.setNumBytes(block.getNumBytes());
// Sort out invalid replicas.
return setGenerationStampAndVerifyReplicas(block.getGenerationStamp());
}
}
| BlockInfo |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/codec/CodecException.java | {
"start": 988,
"end": 1417
} | class ____ extends NestedRuntimeException {
/**
* Create a new CodecException.
* @param msg the detail message
*/
public CodecException(@Nullable String msg) {
super(msg);
}
/**
* Create a new CodecException.
* @param msg the detail message
* @param cause root cause for the exception, if any
*/
public CodecException(@Nullable String msg, @Nullable Throwable cause) {
super(msg, cause);
}
}
| CodecException |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/sinks/TableSinkBase.java | {
"start": 1222,
"end": 1390
} | class ____ implementing the deprecated {@link TableSink} interface. Implement
* {@link DynamicTableSink} directly instead.
*/
@Deprecated
@Internal
public abstract | is |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/core/parameters/converters/SortedSetConverter.java | {
"start": 332,
"end": 1721
} | class ____ implements ParameterConverter {
private final ParameterConverter delegate;
public SortedSetConverter(ParameterConverter delegate) {
this.delegate = delegate;
}
@Override
public Object convert(Object parameter) {
if (parameter instanceof List) {
SortedSet<Object> ret = new TreeSet<>();
List<String> values = (List<String>) parameter;
for (String val : values) {
if (delegate == null) {
ret.add(val);
} else {
ret.add(delegate.convert(val));
}
}
return ret;
} else if (parameter == null) {
return Collections.emptySortedSet();
} else if (delegate != null) {
SortedSet<Object> ret = new TreeSet<>();
ret.add(delegate.convert(parameter));
return ret;
} else {
SortedSet<Object> ret = new TreeSet<>();
ret.add(parameter);
return ret;
}
}
@Override
public void init(ParamConverterProviders deployment, Class<?> rawType, Type genericType, Annotation[] annotations) {
delegate.init(deployment, rawType, genericType, annotations);
}
@Override
public boolean isForSingleObjectContainer() {
return true;
}
public static | SortedSetConverter |
java | quarkusio__quarkus | core/runtime/src/main/java/io/quarkus/devservices/crossclassloader/runtime/RunningDevServicesRegistry.java | {
"start": 1127,
"end": 6307
} | class ____ in the system classloader. The value will be the same between dev and test mode.
public static final String APPLICATION_UUID = randomUUID().toString();
// This index is needed for the DevServicesConfigSource to be able to access the dev services running in a specific launch mode.
private static final Map<String, Set<RunningService>> servicesIndexedByLaunchMode = new ConcurrentHashMap<>();
// A dev service owner is a combination of an extension (feature) and the app type (dev or test) which identifies which dev services
// an extension processor can safely close.
private final Map<ComparableDevServicesConfig, RunningService> servicesIndexedByConfig = new ConcurrentHashMap<>();
private RunningDevServicesRegistry() {
}
void logClosing(String featureName, String launchMode, String containerId) {
log.debugf("Closing dev service for %s in launch mode %s: %s", featureName, launchMode, containerId);
}
void logFailedToClose(Exception e, String featureName, String launchMode, String containerId) {
log.infof(e, "Failed to close dev service for %s in launch mode %s: %s", featureName, launchMode, containerId);
}
public void closeAllRunningServices(String launchMode) {
Iterator<Map.Entry<ComparableDevServicesConfig, RunningService>> it = servicesIndexedByConfig.entrySet().iterator();
while (it.hasNext()) {
Map.Entry<ComparableDevServicesConfig, RunningService> next = it.next();
DevServiceOwner owner = next.getKey().owner();
if (owner.launchMode().equals(launchMode)) {
it.remove();
RunningService service = next.getValue();
try {
logClosing(owner.featureName(), launchMode, service.containerId());
service.close();
} catch (Exception e) {
// We don't want to fail the shutdown hook if a service fails to close
logFailedToClose(e, owner.featureName(), launchMode, service.containerId());
}
}
}
servicesIndexedByLaunchMode.remove(launchMode);
}
public void closeRemainingRunningServices(UUID uuid, String launchMode, Collection<DevServiceOwner> ownersToKeep) {
Set<RunningService> services = servicesIndexedByLaunchMode.get(launchMode);
var iterator = servicesIndexedByConfig.entrySet().iterator();
while (iterator.hasNext()) {
Map.Entry<ComparableDevServicesConfig, RunningService> entry = iterator.next();
DevServiceOwner owner = entry.getKey().owner();
UUID serviceAppUuid = entry.getKey().applicationInstanceId();
if (owner.launchMode().equals(launchMode) && Objects.equals(serviceAppUuid, uuid)
&& !ownersToKeep.contains(owner)) {
iterator.remove();
RunningService service = entry.getValue();
services.remove(service);
try {
logClosing(owner.featureName(), launchMode, service.containerId());
service.close();
} catch (Exception e) {
// We don't want to fail the shutdown hook if a service fails to close
logFailedToClose(e, owner.featureName(), launchMode, service.containerId());
}
}
}
}
public void closeAllRunningServices(DevServiceOwner owner) {
Set<RunningService> launchModeServices = servicesIndexedByLaunchMode.get(owner.launchMode());
var iterator = servicesIndexedByConfig.entrySet().iterator();
while (iterator.hasNext()) {
Map.Entry<ComparableDevServicesConfig, RunningService> entry = iterator.next();
DevServiceOwner entryOwner = entry.getKey().owner();
if (Objects.equals(entryOwner, owner)) {
iterator.remove();
RunningService service = entry.getValue();
if (launchModeServices != null) {
launchModeServices.remove(service);
}
try {
logClosing(owner.featureName(), owner.featureName(), service.containerId());
service.close();
} catch (Exception e) {
// We don't want to fail the shutdown hook if a service fails to close
logFailedToClose(e, owner.featureName(), owner.launchMode(), service.containerId());
}
}
}
}
public Set<RunningService> getAllRunningServices(String launchMode) {
return servicesIndexedByLaunchMode.getOrDefault(launchMode, Collections.emptySet());
}
public RunningService getRunningServices(ComparableDevServicesConfig identifyingConfig) {
return servicesIndexedByConfig.get(identifyingConfig);
}
public void addRunningService(ComparableDevServicesConfig key, RunningService service) {
servicesIndexedByConfig.put(key, service);
servicesIndexedByLaunchMode.computeIfAbsent(key.owner().launchMode(), k -> new HashSet<>()).add(service);
}
}
| lives |
java | elastic__elasticsearch | x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/datageneration/AggregateMetricDoubleDataSourceHandler.java | {
"start": 750,
"end": 2987
} | class ____ implements DataSourceHandler {
@Override
public DataSourceResponse.AggregateMetricDoubleGenerator handle(DataSourceRequest.AggregateMetricDoubleGenerator request) {
return new DataSourceResponse.AggregateMetricDoubleGenerator(() -> {
var metricContainer = new HashMap<String, Number>();
// min and max must make sense - max has to be gte min
double min = ESTestCase.randomDoubleBetween(-Double.MAX_VALUE, 1_000_000_000, false);
double max = ESTestCase.randomDoubleBetween(min, Double.MAX_VALUE, true);
metricContainer.put("min", min);
metricContainer.put("max", max);
metricContainer.put("sum", ESTestCase.randomDouble());
metricContainer.put("value_count", ESTestCase.randomIntBetween(1, Integer.MAX_VALUE));
return metricContainer;
});
}
@Override
public DataSourceResponse.LeafMappingParametersGenerator handle(DataSourceRequest.LeafMappingParametersGenerator request) {
if (request.fieldType().equals("aggregate_metric_double") == false) {
return null;
}
return new DataSourceResponse.LeafMappingParametersGenerator(() -> {
var map = new HashMap<String, Object>();
List<AggregateMetricDoubleFieldMapper.Metric> metrics = ESTestCase.randomNonEmptySubsetOf(
Arrays.asList(AggregateMetricDoubleFieldMapper.Metric.values())
);
map.put("metrics", metrics.stream().map(Enum::toString).toList());
map.put("default_metric", metrics.get(ESTestCase.randomIntBetween(0, metrics.size() - 1)));
if (ESTestCase.randomBoolean()) {
map.put("ignore_malformed", ESTestCase.randomBoolean());
}
return map;
});
}
@Override
public DataSourceResponse.FieldDataGenerator handle(DataSourceRequest.FieldDataGenerator request) {
if (request.fieldType().equals("aggregate_metric_double") == false) {
return null;
}
return new DataSourceResponse.FieldDataGenerator(new AggregateMetricDoubleFieldDataGenerator(request.dataSource()));
}
}
| AggregateMetricDoubleDataSourceHandler |
java | quarkusio__quarkus | extensions/devui/runtime/src/main/java/io/quarkus/devui/runtime/jsonrpc/JsonRpcMethod.java | {
"start": 409,
"end": 4635
} | class ____ {
private Class bean;
private String methodName;
private String description;
private Method javaMethod;
private Map<String, Parameter> parameters;
private List<Usage> usage;
private boolean mcpEnabledByDefault = false;
private RuntimeValue runtimeValue;
private boolean isExplicitlyBlocking;
private boolean isExplicitlyNonBlocking;
public JsonRpcMethod() {
}
public Class getBean() {
return bean;
}
public void setBean(Class bean) {
this.bean = bean;
}
public String getMethodName() {
return methodName;
}
public String getJavaMethodName() {
if (methodName.contains(UNDERSCORE)) {
return methodName.substring(methodName.indexOf(UNDERSCORE) + 1);
}
return methodName;
}
public void setMethodName(String methodName) {
this.methodName = methodName;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public List<Usage> getUsage() {
return usage;
}
public void setUsage(List<Usage> usage) {
this.usage = usage;
}
public boolean isMcpEnabledByDefault() {
return mcpEnabledByDefault;
}
public void setMcpEnabledByDefault(boolean mcpEnabledByDefault) {
this.mcpEnabledByDefault = mcpEnabledByDefault;
}
public Method getJavaMethod() {
return javaMethod;
}
public void setJavaMethod(Method javaMethod) {
this.javaMethod = javaMethod;
}
public Map<String, Parameter> getParameters() {
return parameters;
}
public void setParameters(Map<String, Parameter> parameters) {
this.parameters = parameters;
}
public void addParameter(String name, String description) {
if (this.parameters == null)
this.parameters = new LinkedHashMap<>();
this.parameters.put(name, new Parameter(String.class, description, true));
}
public void addParameter(String name, String description, boolean required) {
if (this.parameters == null)
this.parameters = new LinkedHashMap<>();
this.parameters.put(name, new Parameter(String.class, description, required));
}
public void addParameter(String name, Class<?> type, String description) {
if (this.parameters == null)
this.parameters = new LinkedHashMap<>();
this.parameters.put(name, new Parameter(type, description, true));
}
public void addParameter(String name, Class<?> type, String description, boolean required) {
if (this.parameters == null)
this.parameters = new LinkedHashMap<>();
this.parameters.put(name, new Parameter(type, description, required));
}
public boolean hasParameters() {
return this.parameters != null && !this.parameters.isEmpty();
}
public RuntimeValue getRuntimeValue() {
return runtimeValue;
}
public void setRuntimeValue(RuntimeValue runtimeValue) {
this.runtimeValue = runtimeValue;
}
public boolean isIsExplicitlyBlocking() {
return isExplicitlyBlocking;
}
public void setIsExplicitlyBlocking(boolean isExplicitlyBlocking) {
this.isExplicitlyBlocking = isExplicitlyBlocking;
}
public boolean isIsExplicitlyNonBlocking() {
return isExplicitlyNonBlocking;
}
public void setIsExplicitlyNonBlocking(boolean isExplicitlyNonBlocking) {
this.isExplicitlyNonBlocking = isExplicitlyNonBlocking;
}
public boolean isReturningMulti() {
return javaMethod.getReturnType().getName().equals(Multi.class.getName());
}
public boolean isReturningUni() {
return javaMethod.getReturnType().getName().equals(Uni.class.getName());
}
public boolean isReturningCompletionStage() {
return javaMethod.getReturnType().getName().equals(CompletionStage.class.getName());
}
public boolean isReturningCompletableFuture() {
return javaMethod.getReturnType().getName().equals(CompletableFuture.class.getName());
}
public static | JsonRpcMethod |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/metamodel/mapping/AttributeMappingsList.java | {
"start": 691,
"end": 1055
} | interface ____ to allow
* custom implementations which can be highly optimised as
* necessary for our specific needs; for example the
* implementation {@link ImmutableAttributeMappingList}
* is able to avoid caching problems related to JDK-8180450, which would
* not have been possible with a standard generic container.
*
* @since 6.2
*/
@Incubating
public | is |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceDiagnosticsAction.java | {
"start": 894,
"end": 1515
} | class ____ extends BaseRestHandler {
@Override
public String getName() {
return "get_inference_diagnostics_action";
}
@Override
public List<Route> routes() {
return List.of(new Route(GET, INFERENCE_DIAGNOSTICS_PATH));
}
@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) {
return channel -> client.execute(
GetInferenceDiagnosticsAction.INSTANCE,
new GetInferenceDiagnosticsAction.Request(),
new RestToXContentListener<>(channel)
);
}
}
| RestGetInferenceDiagnosticsAction |
java | dropwizard__dropwizard | dropwizard-health/src/main/java/io/dropwizard/health/response/JsonHealthResponseProvider.java | {
"start": 628,
"end": 3803
} | class ____ implements HealthResponseProvider {
public static final String CHECK_TYPE_QUERY_PARAM = "type";
public static final String NAME_QUERY_PARAM = "name";
public static final String ALL_VALUE = "all";
private static final Logger LOGGER = LoggerFactory.getLogger(JsonHealthResponseProvider.class);
private static final String MEDIA_TYPE = MediaType.APPLICATION_JSON;
@NonNull
private final HealthStatusChecker healthStatusChecker;
@NonNull
private final HealthStateAggregator healthStateAggregator;
@NonNull
private final ObjectMapper mapper;
public JsonHealthResponseProvider(@NonNull final HealthStatusChecker healthStatusChecker,
@NonNull final HealthStateAggregator healthStateAggregator,
@NonNull final ObjectMapper mapper) {
this.healthStatusChecker = Objects.requireNonNull(healthStatusChecker);
this.healthStateAggregator = Objects.requireNonNull(healthStateAggregator);
this.mapper = Objects.requireNonNull(mapper);
}
@NonNull
@Override
public HealthResponse healthResponse(final Map<String, Collection<String>> queryParams) {
final String type = queryParams.getOrDefault(CHECK_TYPE_QUERY_PARAM, Collections.emptyList())
.stream()
.findFirst()
.orElse(null);
final Collection<HealthStateView> views = getViews(queryParams);
final String responseBody;
try {
responseBody = mapper.writeValueAsString(views);
} catch (final Exception e) {
LOGGER.error("Failed to serialize health state views: {}", views, e);
throw new RuntimeException(e);
}
final boolean healthy = healthStatusChecker.isHealthy(type);
final int status;
if (healthy) {
// HTTP OK
status = 200;
} else {
// HTTP Service unavailable
status = 503;
}
return new HealthResponse(healthy, responseBody, MEDIA_TYPE, status);
}
private Set<String> getNamesFromQueryParams(final Map<String, Collection<String>> queryParams) {
return queryParams.getOrDefault(NAME_QUERY_PARAM, Collections.emptyList())
.stream()
// normalize all names to lowercase
.map(String::toLowerCase)
// maintain order by using a linked hash set
.collect(Collectors.toCollection(LinkedHashSet::new));
}
private Collection<HealthStateView> getViews(final Map<String, Collection<String>> queryParams) {
final Set<String> names = getNamesFromQueryParams(queryParams);
if (shouldReturnAllViews(names)) {
return List.copyOf(healthStateAggregator.healthStateViews());
} else {
return names.stream()
.map(healthStateAggregator::healthStateView)
.flatMap(Optional::stream)
.collect(Collectors.toUnmodifiableList());
}
}
private boolean shouldReturnAllViews(final Set<String> names) {
return names.contains(ALL_VALUE);
}
}
| JsonHealthResponseProvider |
java | quarkusio__quarkus | integration-tests/mongodb-panache/src/main/java/io/quarkus/it/mongodb/panache/person/resources/PersonRepositoryResource.java | {
"start": 849,
"end": 3743
} | class ____ {
// fake unused injection point to force ArC to not remove this otherwise I can't mock it in the tests
@Inject
MockablePersonRepository mockablePersonRepository;
@Inject
PersonRepository personRepository;
@GET
public List<Person> getPersons(@QueryParam("sort") String sort) {
if (sort != null) {
return personRepository.listAll(Sort.ascending(sort));
}
return personRepository.listAll();
}
@GET
@Path("/search/{name}")
public Set<PersonName> searchPersons(@PathParam("name") String name) {
Set<PersonName> uniqueNames = new HashSet<>();
List<PersonName> lastnames = personRepository.find("lastname = ?1 and status = ?2", name, Status.ALIVE)
.project(PersonName.class)
.withReadPreference(ReadPreference.primaryPreferred())
.list();
lastnames.forEach(p -> uniqueNames.add(p));// this will throw if it's not the right type
return uniqueNames;
}
@POST
public Response addPerson(Person person) {
personRepository.persist(person);
String id = person.id.toString();
return Response.created(URI.create("/persons/repository/" + id)).build();
}
@POST
@Path("/multiple")
public void addPersons(List<Person> persons) {
personRepository.persist(persons);
}
@PUT
public Response updatePerson(Person person) {
personRepository.update(person);
return Response.accepted().build();
}
// PATCH is not correct here but it allows to test persistOrUpdate without a specific subpath
@PATCH
public Response upsertPerson(Person person) {
personRepository.persistOrUpdate(person);
return Response.accepted().build();
}
@DELETE
@Path("/{id}")
public void deletePerson(@PathParam("id") String id) {
Person person = personRepository.findById(Long.parseLong(id));
personRepository.delete(person);
}
@GET
@Path("/{id}")
public Person getPerson(@PathParam("id") String id) {
return personRepository.findById(Long.parseLong(id));
}
@GET
@Path("/count")
public long countAll() {
return personRepository.count();
}
@DELETE
public void deleteAll() {
personRepository.deleteAll();
}
@POST
@Path("/rename")
public Response rename(@QueryParam("previousName") String previousName, @QueryParam("newName") String newName) {
personRepository.update("lastname", newName).where("lastname", previousName);
return Response.ok().build();
}
@GET
@Path("/search/by/nulls/precedence")
public Response searchPersonsByNullsPrecedence() {
personRepository.listAll(Sort.by("lastname", Sort.NullPrecedence.NULLS_FIRST));
return Response.ok().build();
}
}
| PersonRepositoryResource |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/PrivateConstructorForUtilityClassTest.java | {
"start": 1572,
"end": 1904
} | class ____ {
static void blah() {}
}
private Test() {}
}
""")
.expectUnchanged()
.doTest();
}
@Test
public void subClassesGetLeftAlone() {
testHelper
.addInputLines(
"in/Foo.java",
"""
public | Blah |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/type/classreading/SimpleMetadataReader.java | {
"start": 2125,
"end": 2587
} | class ____ version that is not supported yet. " +
"Consider compiling with a lower '-target' or upgrade your framework version. " +
"Affected class: " + resource, ex);
}
}
}
@Override
public Resource getResource() {
return this.resource;
}
@Override
public ClassMetadata getClassMetadata() {
return this.annotationMetadata;
}
@Override
public AnnotationMetadata getAnnotationMetadata() {
return this.annotationMetadata;
}
}
| file |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/asm/Attribute.java | {
"start": 22239,
"end": 23309
} | class ____ {
private static final int SIZE_INCREMENT = 6;
private int size;
private Attribute[] data = new Attribute[SIZE_INCREMENT];
void addAttributes(final Attribute attributeList) {
Attribute attribute = attributeList;
while (attribute != null) {
if (!contains(attribute)) {
add(attribute);
}
attribute = attribute.nextAttribute;
}
}
Attribute[] toArray() {
Attribute[] result = new Attribute[size];
System.arraycopy(data, 0, result, 0, size);
return result;
}
private boolean contains(final Attribute attribute) {
for (int i = 0; i < size; ++i) {
if (data[i].type.equals(attribute.type)) {
return true;
}
}
return false;
}
private void add(final Attribute attribute) {
if (size >= data.length) {
Attribute[] newData = new Attribute[data.length + SIZE_INCREMENT];
System.arraycopy(data, 0, newData, 0, size);
data = newData;
}
data[size++] = attribute;
}
}
}
| Set |
java | elastic__elasticsearch | modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java | {
"start": 4996,
"end": 14602
} | class ____ extends AbstractHttpServerTransport {
private static final Logger logger = LogManager.getLogger(Netty4HttpServerTransport.class);
private final int pipeliningMaxEvents;
private final SharedGroupFactory sharedGroupFactory;
private final RecvByteBufAllocator recvByteBufAllocator;
private final TLSConfig tlsConfig;
private final AcceptChannelHandler.AcceptPredicate acceptChannelPredicate;
private final HttpValidator httpValidator;
private final ThreadWatchdog threadWatchdog;
private final int readTimeoutMillis;
private final int maxCompositeBufferComponents;
private volatile ServerBootstrap serverBootstrap;
private volatile SharedGroupFactory.SharedGroup sharedGroup;
private final TlsHandshakeThrottleManager tlsHandshakeThrottleManager;
public Netty4HttpServerTransport(
Settings settings,
NetworkService networkService,
ThreadPool threadPool,
NamedXContentRegistry xContentRegistry,
Dispatcher dispatcher,
ClusterSettings clusterSettings,
SharedGroupFactory sharedGroupFactory,
TelemetryProvider telemetryProvider,
TLSConfig tlsConfig,
@Nullable AcceptChannelHandler.AcceptPredicate acceptChannelPredicate,
@Nullable HttpValidator httpValidator
) {
super(
settings,
networkService,
Netty4Utils.createRecycler(settings),
threadPool,
xContentRegistry,
dispatcher,
clusterSettings,
telemetryProvider
);
Netty4Utils.setAvailableProcessors(EsExecutors.allocatedProcessors(settings));
NettyAllocator.logAllocatorDescriptionIfNeeded();
this.sharedGroupFactory = sharedGroupFactory;
this.tlsConfig = tlsConfig;
this.acceptChannelPredicate = acceptChannelPredicate;
this.httpValidator = httpValidator;
this.threadWatchdog = networkService.getThreadWatchdog();
this.pipeliningMaxEvents = SETTING_PIPELINING_MAX_EVENTS.get(settings);
this.maxCompositeBufferComponents = Netty4Plugin.SETTING_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS.get(settings);
this.readTimeoutMillis = Math.toIntExact(SETTING_HTTP_READ_TIMEOUT.get(settings).getMillis());
this.tlsHandshakeThrottleManager = new TlsHandshakeThrottleManager(clusterSettings, telemetryProvider.getMeterRegistry());
ByteSizeValue receivePredictor = Netty4Plugin.SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE.get(settings);
recvByteBufAllocator = new FixedRecvByteBufAllocator(receivePredictor.bytesAsInt());
logger.debug(
"using max_chunk_size[{}], max_header_size[{}], max_initial_line_length[{}], max_content_length[{}], "
+ "receive_predictor[{}], max_composite_buffer_components[{}], pipelining_max_events[{}]",
SETTING_HTTP_MAX_CHUNK_SIZE.get(settings),
SETTING_HTTP_MAX_HEADER_SIZE.get(settings),
SETTING_HTTP_MAX_INITIAL_LINE_LENGTH.get(settings),
maxContentLength,
receivePredictor,
maxCompositeBufferComponents,
pipeliningMaxEvents
);
}
public Settings settings() {
return this.settings;
}
@Override
protected void startInternal() {
boolean success = false;
try {
tlsHandshakeThrottleManager.start();
sharedGroup = sharedGroupFactory.getHttpGroup();
serverBootstrap = new ServerBootstrap();
serverBootstrap.group(sharedGroup.getLowLevelGroup());
// NettyAllocator will return the channel type designed to work with the configuredAllocator
serverBootstrap.channel(NettyAllocator.getServerChannelType());
// Set the allocators for both the server channel and the child channels created
serverBootstrap.option(ChannelOption.ALLOCATOR, NettyAllocator.getAllocator());
serverBootstrap.childOption(ChannelOption.ALLOCATOR, NettyAllocator.getAllocator());
serverBootstrap.childHandler(configureServerChannelHandler());
serverBootstrap.handler(ServerChannelExceptionHandler.INSTANCE);
serverBootstrap.childOption(ChannelOption.TCP_NODELAY, SETTING_HTTP_TCP_NO_DELAY.get(settings));
serverBootstrap.childOption(ChannelOption.SO_KEEPALIVE, SETTING_HTTP_TCP_KEEP_ALIVE.get(settings));
if (SETTING_HTTP_TCP_KEEP_ALIVE.get(settings)) {
// Netty logs a warning if it can't set the option, so try this only on supported platforms
if (IOUtils.LINUX || IOUtils.MAC_OS_X) {
if (SETTING_HTTP_TCP_KEEP_IDLE.get(settings) >= 0) {
serverBootstrap.childOption(
NioChannelOption.of(NetUtils.getTcpKeepIdleSocketOption()),
SETTING_HTTP_TCP_KEEP_IDLE.get(settings)
);
}
if (SETTING_HTTP_TCP_KEEP_INTERVAL.get(settings) >= 0) {
serverBootstrap.childOption(
NioChannelOption.of(NetUtils.getTcpKeepIntervalSocketOption()),
SETTING_HTTP_TCP_KEEP_INTERVAL.get(settings)
);
}
if (SETTING_HTTP_TCP_KEEP_COUNT.get(settings) >= 0) {
serverBootstrap.childOption(
NioChannelOption.of(NetUtils.getTcpKeepCountSocketOption()),
SETTING_HTTP_TCP_KEEP_COUNT.get(settings)
);
}
}
}
final ByteSizeValue tcpSendBufferSize = SETTING_HTTP_TCP_SEND_BUFFER_SIZE.get(settings);
if (tcpSendBufferSize.getBytes() > 0) {
serverBootstrap.childOption(ChannelOption.SO_SNDBUF, Math.toIntExact(tcpSendBufferSize.getBytes()));
}
final ByteSizeValue tcpReceiveBufferSize = SETTING_HTTP_TCP_RECEIVE_BUFFER_SIZE.get(settings);
if (tcpReceiveBufferSize.getBytes() > 0) {
serverBootstrap.childOption(ChannelOption.SO_RCVBUF, Math.toIntExact(tcpReceiveBufferSize.getBytes()));
}
serverBootstrap.option(ChannelOption.RCVBUF_ALLOCATOR, recvByteBufAllocator);
serverBootstrap.childOption(ChannelOption.RCVBUF_ALLOCATOR, recvByteBufAllocator);
final boolean reuseAddress = SETTING_HTTP_TCP_REUSE_ADDRESS.get(settings);
serverBootstrap.option(ChannelOption.SO_REUSEADDR, reuseAddress);
serverBootstrap.childOption(ChannelOption.SO_REUSEADDR, reuseAddress);
bindServer();
if (acceptChannelPredicate != null) {
acceptChannelPredicate.setBoundAddress(boundAddress());
}
success = true;
} finally {
if (success == false) {
doStop(); // otherwise we leak threads since we never moved to started
}
}
}
@Override
protected HttpServerChannel bind(InetSocketAddress socketAddress) throws Exception {
ChannelFuture future = serverBootstrap.bind(socketAddress).sync();
Channel channel = future.channel();
Netty4HttpServerChannel httpServerChannel = new Netty4HttpServerChannel(channel);
channel.attr(HTTP_SERVER_CHANNEL_KEY).set(httpServerChannel);
return httpServerChannel;
}
@Override
protected void stopInternal() {
if (tlsHandshakeThrottleManager.lifecycleState() != Lifecycle.State.INITIALIZED) {
tlsHandshakeThrottleManager.stop();
}
if (sharedGroup != null) {
sharedGroup.shutdown();
sharedGroup = null;
}
}
@Override
public void onException(HttpChannel channel, Exception cause) {
if (lifecycle.started() == false) {
return;
}
if (SSLExceptionHelper.isNotSslRecordException(cause)) {
logger.warn("received plaintext http traffic on an https channel, closing connection {}", channel);
CloseableChannel.closeChannel(channel);
} else if (SSLExceptionHelper.isCloseDuringHandshakeException(cause)) {
logger.debug("connection {} closed during ssl handshake", channel);
CloseableChannel.closeChannel(channel);
} else if (SSLExceptionHelper.isInsufficientBufferRemainingException(cause)) {
logger.debug("connection {} closed abruptly", channel);
CloseableChannel.closeChannel(channel);
} else if (SSLExceptionHelper.isReceivedCertificateUnknownException(cause)) {
logger.warn("http client did not trust this server's certificate, closing connection {}", channel);
CloseableChannel.closeChannel(channel);
} else if (cause instanceof ReadTimeoutException) {
super.onException(channel, new HttpReadTimeoutException(readTimeoutMillis, cause));
} else {
super.onException(channel, cause);
}
}
public ChannelHandler configureServerChannelHandler() {
return new HttpChannelHandler(this, handlingSettings, tlsConfig, acceptChannelPredicate, httpValidator);
}
static final AttributeKey<Netty4HttpChannel> HTTP_CHANNEL_KEY = AttributeKey.newInstance("es-http-channel");
static final AttributeKey<Netty4HttpServerChannel> HTTP_SERVER_CHANNEL_KEY = AttributeKey.newInstance("es-http-server-channel");
protected static | Netty4HttpServerTransport |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryWriter.java | {
"start": 1940,
"end": 4292
} | interface ____ {
/**
* This method writes the information of <code>RMApp</code> that is available
* when it starts.
*
* @param appStart
* the record of the information of <code>RMApp</code> that is
* available when it starts
* @throws IOException
*/
void applicationStarted(ApplicationStartData appStart) throws IOException;
/**
* This method writes the information of <code>RMApp</code> that is available
* when it finishes.
*
* @param appFinish
* the record of the information of <code>RMApp</code> that is
* available when it finishes
* @throws IOException
*/
void applicationFinished(ApplicationFinishData appFinish) throws IOException;
/**
* This method writes the information of <code>RMAppAttempt</code> that is
* available when it starts.
*
* @param appAttemptStart
* the record of the information of <code>RMAppAttempt</code> that is
* available when it starts
* @throws IOException
*/
void applicationAttemptStarted(ApplicationAttemptStartData appAttemptStart)
throws IOException;
/**
* This method writes the information of <code>RMAppAttempt</code> that is
* available when it finishes.
*
* @param appAttemptFinish
* the record of the information of <code>RMAppAttempt</code> that is
* available when it finishes
* @throws IOException
*/
void
applicationAttemptFinished(ApplicationAttemptFinishData appAttemptFinish)
throws IOException;
/**
* This method writes the information of <code>RMContainer</code> that is
* available when it starts.
*
* @param containerStart
* the record of the information of <code>RMContainer</code> that is
* available when it starts
* @throws IOException
*/
void containerStarted(ContainerStartData containerStart) throws IOException;
/**
* This method writes the information of <code>RMContainer</code> that is
* available when it finishes.
*
* @param containerFinish
* the record of the information of <code>RMContainer</code> that is
* available when it finishes
* @throws IOException
*/
void containerFinished(ContainerFinishData containerFinish)
throws IOException;
}
| ApplicationHistoryWriter |
java | apache__dubbo | dubbo-config/dubbo-config-spring/src/test/java/org/apache/dubbo/config/spring/samples/ZookeeperDubboSpringConsumerBootstrap.java | {
"start": 1438,
"end": 2308
} | class ____ {
private static final Logger logger = LoggerFactory.getLogger(ZookeeperDubboSpringConsumerBootstrap.class);
@DubboReference(services = "${dubbo.provider.name},${dubbo.provider.name1},${dubbo.provider.name2}")
private DemoService demoService;
public static void main(String[] args) throws Exception {
Class<?> beanType = ZookeeperDubboSpringConsumerBootstrap.class;
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(beanType);
ZookeeperDubboSpringConsumerBootstrap bootstrap = context.getBean(ZookeeperDubboSpringConsumerBootstrap.class);
for (int i = 0; i < 100; i++) {
logger.info(bootstrap.demoService.sayName("Hello"));
Thread.sleep(1000L);
}
System.in.read();
context.close();
}
}
| ZookeeperDubboSpringConsumerBootstrap |
java | micronaut-projects__micronaut-core | http-netty/src/main/java/io/micronaut/http/netty/SslContextHolder.java | {
"start": 1277,
"end": 2222
} | class ____ missing
* @author Jonas Konrad
* @since 4.10.0
*/
@Internal
public record SslContextHolder(
@Nullable SslContext sslContext,
@Nullable Object quicSslContextObject
) {
/**
* Retain the underlying Netty contexts for safe reuse.
*/
public void retain() {
if (sslContext != null) {
ReferenceCountUtil.retain(sslContext);
}
if (quicSslContextObject != null) {
ReferenceCountUtil.retain(quicSslContextObject);
}
}
/**
* Release the underlying Netty contexts when no longer needed.
*/
public void release() {
if (sslContext != null) {
ReferenceCountUtil.release(sslContext);
}
if (quicSslContextObject != null) {
ReferenceCountUtil.release(quicSslContextObject);
}
}
public QuicSslContext quicSslContext() {
return (QuicSslContext) quicSslContextObject;
}
}
| is |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/introspect/IntrospectorPairTest.java | {
"start": 1867,
"end": 2553
} | class ____ extends AnnotationIntrospector {
final Object _deserializer;
final Object _serializer;
public IntrospectorWithHandlers(Object deser, Object ser) {
_deserializer = deser;
_serializer = ser;
}
@Override
public Version version() {
return Version.unknownVersion();
}
@Override
public Object findDeserializer(MapperConfig<?> config, Annotated am) {
return _deserializer;
}
@Override
public Object findSerializer(MapperConfig<?> config, Annotated am) {
return _serializer;
}
}
static | IntrospectorWithHandlers |
java | quarkusio__quarkus | integration-tests/rest-client-reactive-multipart/src/test/java/io/quarkus/it/rest/client/multipart/JsonSerializationTest.java | {
"start": 210,
"end": 1363
} | class ____ {
@Test
public void testEcho() {
RestAssured
.with()
.body("{\"publicName\":\"Leo\",\"veterinarian\":{\"name\":\"Dolittle\"},\"age\":5}")
.contentType("application/json; charset=utf-8")
.post("/json-serialization/dog-echo")
.then()
.statusCode(200)
.contentType("application/json")
.body("publicName", Matchers.is("Leo"))
.body("privateName", Matchers.nullValue())
.body("age", Matchers.is(5))
.body("veterinarian.name", Matchers.is("Dolittle"))
.body("veterinarian.title", Matchers.nullValue());
}
@Test
public void testInterface() {
RestAssured
.with()
.get("/json-serialization/interface")
.then()
.statusCode(200)
.body("nestedInterface.int", Matchers.is(42))
.body("nestedInterface.character", Matchers.is("a"))
.body("nestedInterface.string", Matchers.is("response"));
}
}
| JsonSerializationTest |
java | hibernate__hibernate-orm | hibernate-envers/src/main/java/org/hibernate/envers/configuration/internal/metadata/AuditTableData.java | {
"start": 348,
"end": 986
} | class ____ {
private final String auditEntityName;
private final String auditTableName;
private final String schema;
private final String catalog;
public AuditTableData(String auditEntityName, String auditTableName, String schema, String catalog) {
this.auditEntityName = auditEntityName;
this.auditTableName = auditTableName;
this.schema = schema;
this.catalog = catalog;
}
public String getAuditEntityName() {
return auditEntityName;
}
public String getAuditTableName() {
return auditTableName;
}
public String getSchema() {
return schema;
}
public String getCatalog() {
return catalog;
}
}
| AuditTableData |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/comparison/fields/RecursiveComparisonAssert_isEqualTo_ignoringCollectionOrder_Test.java | {
"start": 20894,
"end": 22259
} | class ____ {
String name;
int type;
PersonWithInt(String name, int type) {
this.name = name;
this.type = type;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
PersonWithInt that = (PersonWithInt) o;
return type == that.type && Objects.equals(name, that.name);
}
@Override
public int hashCode() {
return Objects.hash(name, type);
}
@Override
public String toString() {
return "Person [name=%s, type=%s]".formatted(name, type);
}
}
@Test
public void should_not_remove_already_visited_int_dual_values_as_they_cant_produce_cycles() {
// GIVEN
List<PersonWithInt> persons = list(new PersonWithInt("name-1", 1),
new PersonWithInt("name-2", 1),
new PersonWithInt("name-2", 2));
// WHEN/THEN
then(persons).usingRecursiveComparison(recursiveComparisonConfiguration)
.ignoringCollectionOrder()
.isEqualTo(list(new PersonWithInt("name-2", 2),
new PersonWithInt("name-2", 1),
new PersonWithInt("name-1", 1)));
}
// https://github.com/assertj/assertj/issues/2954
static | PersonWithInt |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/jmx/access/MBeanClientInterceptor.java | {
"start": 22630,
"end": 24187
} | class ____ implements Comparable<MethodCacheKey> {
private final String name;
private final Class<?>[] parameterTypes;
/**
* Create a new instance of {@code MethodCacheKey} with the supplied
* method name and parameter list.
* @param name the name of the method
* @param parameterTypes the arguments in the method signature
*/
public MethodCacheKey(String name, Class<?> @Nullable [] parameterTypes) {
this.name = name;
this.parameterTypes = (parameterTypes != null ? parameterTypes : new Class<?>[0]);
}
@Override
public boolean equals(@Nullable Object other) {
return (this == other || (other instanceof MethodCacheKey that &&
this.name.equals(that.name) &&
Arrays.equals(this.parameterTypes, that.parameterTypes)));
}
@Override
public int hashCode() {
return this.name.hashCode();
}
@Override
public String toString() {
return this.name + "(" + StringUtils.arrayToCommaDelimitedString(this.parameterTypes) + ")";
}
@Override
public int compareTo(MethodCacheKey other) {
int result = this.name.compareTo(other.name);
if (result != 0) {
return result;
}
if (this.parameterTypes.length < other.parameterTypes.length) {
return -1;
}
if (this.parameterTypes.length > other.parameterTypes.length) {
return 1;
}
for (int i = 0; i < this.parameterTypes.length; i++) {
result = this.parameterTypes[i].getName().compareTo(other.parameterTypes[i].getName());
if (result != 0) {
return result;
}
}
return 0;
}
}
}
| MethodCacheKey |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshots.java | {
"start": 1427,
"end": 1592
} | class ____ used to find shard files that were already snapshotted and clear out shard files that are no longer referenced by any
* snapshots of the shard.
*/
public | is |
java | apache__camel | components/camel-spring-parent/camel-spring-ws/src/main/java/org/apache/camel/component/spring/ws/SpringWebserviceConstants.java | {
"start": 936,
"end": 3343
} | class ____ {
@Metadata(label = "producer", description = "The endpoint URI", javaType = "String")
public static final String SPRING_WS_ENDPOINT_URI = "CamelSpringWebserviceEndpointUri";
@Metadata(label = "producer",
description = "SOAP action to include inside a SOAP request when accessing remote web services",
javaType = "String")
public static final String SPRING_WS_SOAP_ACTION = "CamelSpringWebserviceSoapAction";
@Metadata(label = "producer", description = "The soap header source", javaType = "javax.xml.transform.Source")
public static final String SPRING_WS_SOAP_HEADER = "CamelSpringWebserviceSoapHeader";
/**
* WS-Addressing 1.0 action header to include when accessing web services. The To header is set to the address of
* the web service as specified in the endpoint URI (default Spring-WS behavior).
*/
@Metadata(label = "producer", javaType = "java.net.URI")
public static final String SPRING_WS_ADDRESSING_ACTION = "CamelSpringWebserviceAddressingAction";
/**
* Signifies the value for the faultAction response WS-Addressing <code>FaultTo</code> header that is provided by
* the method.
*
* See org.springframework.ws.soap.addressing.server.annotation.Action annotation for more details.
*/
@Metadata(label = "producer", javaType = "java.net.URI")
public static final String SPRING_WS_ADDRESSING_PRODUCER_FAULT_TO = "CamelSpringWebserviceAddressingFaultTo";
/**
* Signifies the value for the replyTo response WS-Addressing <code>ReplyTo</code> header that is provided by the
* method.
*
* See org.springframework.ws.soap.addressing.server.annotation.Action annotation for more details.
*/
@Metadata(label = "producer", javaType = "java.net.URI")
public static final String SPRING_WS_ADDRESSING_PRODUCER_REPLY_TO = "CamelSpringWebserviceAddressingReplyTo";
public static final String SPRING_WS_ADDRESSING_CONSUMER_OUTPUT_ACTION = "CamelSpringWebserviceAddressingOutputAction";
public static final String SPRING_WS_ADDRESSING_CONSUMER_FAULT_ACTION = "CamelSpringWebserviceAddressingFaultAction";
@Metadata(label = "consumer", description = "The breadcrumb id.", javaType = "String")
public static final String BREADCRUMB_ID = Exchange.BREADCRUMB_ID;
private SpringWebserviceConstants() {
}
}
| SpringWebserviceConstants |
java | apache__logging-log4j2 | log4j-1.2-api/src/main/java/org/apache/log4j/helpers/OptionConverter.java | {
"start": 26203,
"end": 27235
} | class ____ toLevel(String, Level)", e);
} catch (final java.lang.reflect.InvocationTargetException e) {
if (e.getTargetException() instanceof InterruptedException
|| e.getTargetException() instanceof InterruptedIOException) {
Thread.currentThread().interrupt();
}
LOGGER.warn("custom level class [" + clazz + "]" + " could not be instantiated", e);
} catch (final ClassCastException e) {
LOGGER.warn("class [" + clazz + "] is not a subclass of org.apache.log4j.Level", e);
} catch (final IllegalAccessException e) {
LOGGER.warn("class [" + clazz + "] cannot be instantiated due to access restrictions", e);
} catch (final RuntimeException e) {
LOGGER.warn("class [" + clazz + "], level [" + levelName + "] conversion failed.", e);
}
return defaultValue;
}
/**
* OptionConverter is a static class.
*/
private OptionConverter() {}
private static | function |
java | grpc__grpc-java | core/src/main/java/io/grpc/internal/ContextRunnable.java | {
"start": 850,
"end": 1222
} | class ____ implements Runnable {
private final Context context;
protected ContextRunnable(Context context) {
this.context = context;
}
@Override
public final void run() {
Context previous = context.attach();
try {
runInContext();
} finally {
context.detach(previous);
}
}
public abstract void runInContext();
}
| ContextRunnable |
java | google__error-prone | core/src/test/java/com/google/errorprone/scanner/ScannerTest.java | {
"start": 1789,
"end": 2177
} | class ____ {
// BUG: Diagnostic contains: ShouldNotUseFoo
Foo foo;
}
""")
.doTest();
}
@Test
public void notSuppressedByAnnotationOnParameterizedType() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import com.google.errorprone.scanner.ScannerTest.Foo;
| Test |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/error/ShouldBeDirectory.java | {
"start": 910,
"end": 1632
} | class ____ extends BasicErrorMessageFactory {
private static final String PATH_SHOULD_BE_DIRECTORY = "%nExpecting path:%n %s%nto be a directory.";
private static final String FILE_SHOULD_BE_DIRECTORY = "%nExpecting file:%n %s%nto be an existing directory.";
public static ErrorMessageFactory shouldBeDirectory(final Path actual) {
return new ShouldBeDirectory(actual);
}
public static ErrorMessageFactory shouldBeDirectory(final File actual) {
return new ShouldBeDirectory(actual);
}
private ShouldBeDirectory(final Path actual) {
super(PATH_SHOULD_BE_DIRECTORY, actual);
}
private ShouldBeDirectory(final File actual) {
super(FILE_SHOULD_BE_DIRECTORY, actual);
}
}
| ShouldBeDirectory |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/concurrent/ConcurrentRuntimeException.java | {
"start": 988,
"end": 1429
} | class ____ an analogue of the {@link ConcurrentException} exception class.
* However, it is a runtime exception and thus does not need explicit catch
* clauses. Some methods of {@link ConcurrentUtils} throw {@code
* ConcurrentRuntimeException} exceptions rather than
* {@link ConcurrentException} exceptions. They can be used by client code that
* does not want to be bothered with checked exceptions.
* </p>
*
* @since 3.0
*/
public | is |
java | quarkusio__quarkus | integration-tests/main/src/test/java/io/quarkus/it/main/DefaultMethodTestCase.java | {
"start": 756,
"end": 893
} | class ____ implements NonTestInterface {
@Override
public void abstractMethod() {
}
}
| NonTestInterfaceTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/ResourceLocalizationResponsePBImpl.java | {
"start": 1158,
"end": 2309
} | class ____ extends
ResourceLocalizationResponse {
ResourceLocalizationResponseProto proto =
ResourceLocalizationResponseProto.getDefaultInstance();
ResourceLocalizationResponseProto.Builder builder = null;
boolean viaProto = false;
public YarnServiceProtos.ResourceLocalizationResponseProto getProto() {
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
public ResourceLocalizationResponsePBImpl() {
builder = ResourceLocalizationResponseProto.newBuilder();
}
public ResourceLocalizationResponsePBImpl(
ResourceLocalizationResponseProto proto) {
this.proto = proto;
viaProto = true;
}
@Override
public int hashCode() {
return getProto().hashCode();
}
@Override
public boolean equals(Object other) {
if (other == null) {
return false;
}
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
@Override
public String toString() {
return TextFormat.shortDebugString(getProto());
}
}
| ResourceLocalizationResponsePBImpl |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/alterTable/MySqlAlterTableTest49.java | {
"start": 323,
"end": 888
} | class ____ extends TestCase {
public void test_0() throws Exception {
String sql = "alter table a add column auto_id int not null auto_increment by group primary key;";
List<SQLStatement> stmtList = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL);
assertEquals(1, stmtList.size());
SQLStatement stmt = stmtList.get(0);
String output = stmt.toString();
assertEquals("ALTER TABLE a\n" +
"\tADD COLUMN auto_id int NOT NULL PRIMARY KEY AUTO_INCREMENT BY GROUP;", output);
}
}
| MySqlAlterTableTest49 |
java | micronaut-projects__micronaut-core | http/src/main/java/io/micronaut/runtime/http/scope/RequestCustomScope.java | {
"start": 1516,
"end": 4400
} | class ____ extends AbstractConcurrentCustomScope<RequestScope> implements ApplicationEventListener<HttpRequestTerminatedEvent> {
/**
* The request attribute to store scoped beans in.
*/
public static final String SCOPED_BEANS_ATTRIBUTE = "io.micronaut.http.SCOPED_BEANS";
/**
* Creates the request scope for the given context.
*
*/
public RequestCustomScope() {
super(RequestScope.class);
}
@Override
public void close() {
ServerRequestContext.currentRequest().ifPresent(this::destroyBeans);
}
@Override
public boolean isRunning() {
return ServerRequestContext.currentRequest().isPresent();
}
@Override
public void onApplicationEvent(HttpRequestTerminatedEvent event) {
destroyBeans(event.getSource());
}
@NonNull
@Override
protected Map<BeanIdentifier, CreatedBean<?>> getScopeMap(boolean forCreation) {
final HttpRequest<Object> request = ServerRequestContext.currentRequest().orElse(null);
if (request != null) {
//noinspection ConstantConditions
return getRequestAttributeMap(request, forCreation);
} else {
throw new IllegalStateException("No request present");
}
}
@NonNull
@Override
protected <T> CreatedBean<T> doCreate(@NonNull BeanCreationContext<T> creationContext) {
final HttpRequest<Object> request = ServerRequestContext.currentRequest().orElse(null);
final CreatedBean<T> createdBean = super.doCreate(creationContext);
final T bean = createdBean.bean();
if (bean instanceof RequestAware aware) {
aware.setRequest(request);
}
return createdBean;
}
/**
* Destroys the request scoped beans for the given request.
* @param request The request
*/
private void destroyBeans(HttpRequest<?> request) {
ArgumentUtils.requireNonNull("request", request);
ConcurrentHashMap<BeanIdentifier, CreatedBean<?>> requestScopedBeans =
getRequestAttributeMap(request, false);
if (requestScopedBeans != null) {
destroyScope(requestScopedBeans);
}
}
private <T> ConcurrentHashMap<BeanIdentifier, CreatedBean<?>> getRequestAttributeMap(HttpRequest<T> httpRequest, boolean create) {
MutableConvertibleValues<Object> attrs = httpRequest.getAttributes();
Object o = attrs.getValue(SCOPED_BEANS_ATTRIBUTE);
if (o instanceof ConcurrentHashMap) {
return (ConcurrentHashMap<BeanIdentifier, CreatedBean<?>>) o;
}
if (create) {
ConcurrentHashMap<BeanIdentifier, CreatedBean<?>> scopedBeans = new ConcurrentHashMap<>(5);
attrs.put(SCOPED_BEANS_ATTRIBUTE, scopedBeans);
return scopedBeans;
}
return null;
}
}
| RequestCustomScope |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/Cw2EndpointBuilderFactory.java | {
"start": 19756,
"end": 21709
} | interface ____ {
/**
* AWS CloudWatch (camel-aws2-cw)
* Sending metrics to AWS CloudWatch.
*
* Category: cloud,monitoring
* Since: 3.1
* Maven coordinates: org.apache.camel:camel-aws2-cw
*
* @return the dsl builder for the headers' name.
*/
default Cw2HeaderNameBuilder aws2Cw() {
return Cw2HeaderNameBuilder.INSTANCE;
}
/**
* AWS CloudWatch (camel-aws2-cw)
* Sending metrics to AWS CloudWatch.
*
* Category: cloud,monitoring
* Since: 3.1
* Maven coordinates: org.apache.camel:camel-aws2-cw
*
* Syntax: <code>aws2-cw:namespace</code>
*
* Path parameter: namespace (required)
* The metric namespace
*
* @param path namespace
* @return the dsl builder
*/
default Cw2EndpointBuilder aws2Cw(String path) {
return Cw2EndpointBuilderFactory.endpointBuilder("aws2-cw", path);
}
/**
* AWS CloudWatch (camel-aws2-cw)
* Sending metrics to AWS CloudWatch.
*
* Category: cloud,monitoring
* Since: 3.1
* Maven coordinates: org.apache.camel:camel-aws2-cw
*
* Syntax: <code>aws2-cw:namespace</code>
*
* Path parameter: namespace (required)
* The metric namespace
*
* @param componentName to use a custom component name for the endpoint
* instead of the default name
* @param path namespace
* @return the dsl builder
*/
default Cw2EndpointBuilder aws2Cw(String componentName, String path) {
return Cw2EndpointBuilderFactory.endpointBuilder(componentName, path);
}
}
/**
* The builder of headers' name for the AWS CloudWatch component.
*/
public static | Cw2Builders |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/conditional/propertyname/targetpropertyname/ConditionalMethodWithTargetPropertyNameInContextMapper.java | {
"start": 1533,
"end": 2114
} | class ____ {
Set<String> visited = new LinkedHashSet<>();
@Condition
public boolean isNotBlank(String value, @TargetPropertyName String propName) {
visited.add( propName );
return value != null && !value.trim().isEmpty();
}
}
@Mapping(target = "country", source = "originCountry")
@Mapping(target = "addresses", source = "originAddresses")
Employee map(EmployeeDto employee, @Context PresenceUtilsAllProps utils);
Address map(AddressDto addressDto, @Context PresenceUtilsAllProps utils);
| PresenceUtils |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/jsonFormatVisitors/JsonStringFormatVisitor.java | {
"start": 361,
"end": 458
} | class ____ extends JsonValueFormatVisitor.Base
implements JsonStringFormatVisitor { }
}
| Base |
java | micronaut-projects__micronaut-core | core/src/main/java/io/micronaut/core/util/clhm/EntryWeigher.java | {
"start": 1055,
"end": 1413
} | interface ____<K, V> {
/**
* Measures an entry's weight to determine how many units of capacity that
* the key and value consumes. An entry must consume a minimum of one unit.
*
* @param key the key to weigh
* @param value the value to weigh
* @return the entry's weight
*/
int weightOf(K key, V value);
}
| EntryWeigher |
java | elastic__elasticsearch | x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/ShardSearchRequestInterceptor.java | {
"start": 933,
"end": 3063
} | class ____ extends FieldAndDocumentLevelSecurityRequestInterceptor {
private static final Logger logger = LogManager.getLogger(ShardSearchRequestInterceptor.class);
public ShardSearchRequestInterceptor(ThreadPool threadPool, XPackLicenseState licenseState) {
super(threadPool.getThreadContext(), licenseState);
}
@Override
void disableFeatures(
IndicesRequest indicesRequest,
Map<String, IndicesAccessControl.IndexAccessControl> indexAccessControlByIndex,
ActionListener<Void> listener
) {
final ShardSearchRequest request = (ShardSearchRequest) indicesRequest;
if (dlsUsesStoredScripts(request, indexAccessControlByIndex)) {
logger.debug("Disable shard search request cache because DLS queries use stored scripts");
request.requestCache(false);
}
listener.onResponse(null);
}
@Override
String[] requestIndices(IndicesRequest indicesRequest) {
final ShardSearchRequest request = (ShardSearchRequest) indicesRequest;
return new String[] { request.shardId().getIndexName() };
}
@Override
public boolean supports(IndicesRequest request) {
return request instanceof ShardSearchRequest;
}
static boolean dlsUsesStoredScripts(
ShardSearchRequest request,
Map<String, IndicesAccessControl.IndexAccessControl> indexAccessControlByIndex
) {
final String indexName = request.shardId().getIndexName();
final IndicesAccessControl.IndexAccessControl indexAccessControl = indexAccessControlByIndex.get(indexName);
assert indexAccessControl != null : "index access control cannot be null";
final DocumentPermissions documentPermissions = indexAccessControl.getDocumentPermissions();
if (documentPermissions.hasDocumentLevelPermissions()) {
try {
return documentPermissions.hasStoredScript();
} catch (IOException e) {
throw new ElasticsearchException(e);
}
} else {
return false;
}
}
}
| ShardSearchRequestInterceptor |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-links/deployment/src/test/java/io/quarkus/resteasy/reactive/links/deployment/HalLinksWithJsonbTest.java | {
"start": 273,
"end": 1165
} | class ____ extends AbstractHalLinksTest {
@RegisterExtension
static final QuarkusProdModeTest TEST = new QuarkusProdModeTest()
.withApplicationRoot((jar) -> jar
.addClasses(AbstractId.class, AbstractEntity.class, TestRecord.class, TestResource.class,
TestRecordWithIdAndPersistenceIdAndRestLinkId.class, TestRecordWithIdAndRestLinkId.class,
TestRecordWithIdAndPersistenceId.class, TestRecordWithPersistenceId.class,
TestRecordWithRestLinkId.class, TestRecordWithPersistenceIdAndRestLinkId.class))
.setForcedDependencies(List.of(
Dependency.of("io.quarkus", "quarkus-rest-jsonb", Version.getVersion()),
Dependency.of("io.quarkus", "quarkus-hal", Version.getVersion())))
.setRun(true);
}
| HalLinksWithJsonbTest |
java | quarkusio__quarkus | integration-tests/spring-data-jpa/src/main/java/io/quarkus/it/spring/data/jpa/Order.java | {
"start": 224,
"end": 636
} | class ____ extends AbstractEntity {
@OneToOne(fetch = FetchType.LAZY)
private Cart cart;
public Order() {
}
public Order(Cart cart) {
this.cart = cart;
}
public Order(Long id, Cart cart) {
this.id = id;
this.cart = cart;
}
public Cart getCart() {
return cart;
}
public void setCart(Cart cart) {
this.cart = cart;
}
}
| Order |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/model/source/internal/hbm/FormulaImpl.java | {
"start": 247,
"end": 794
} | class ____
extends AbstractHbmSourceNode
implements DerivedValueSource {
private final String tableName;
private final String expression;
FormulaImpl(MappingDocument mappingDocument, String tableName, String expression) {
super( mappingDocument );
this.tableName = tableName;
this.expression = expression;
}
@Override
public Nature getNature() {
return Nature.DERIVED;
}
@Override
public String getExpression() {
return expression;
}
@Override
public String getContainingTableName() {
return tableName;
}
}
| FormulaImpl |
java | apache__camel | components/camel-jaxb/src/test/java/org/apache/camel/jaxb/TimerBeanToBeanNotificationTypeTest.java | {
"start": 1042,
"end": 1850
} | class ____ extends CamelTestSupport {
@Test
public void testBeanToBean() throws Exception {
getMockEndpoint("mock:notify").expectedMessageCount(1);
getMockEndpoint("mock:notify").message(0).body().isInstanceOf(NotificationType.class);
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("timer:foo?delay=500&repeatCount=1")
.log("Timer triggered")
.bean(MyNotificationService.class, "createNotification")
.bean(MyNotificationService.class, "sendNotification");
}
};
}
}
| TimerBeanToBeanNotificationTypeTest |
java | junit-team__junit5 | junit-vintage-engine/src/main/java/org/junit/vintage/engine/execution/VintageExecutor.java | {
"start": 1510,
"end": 7121
} | class ____ {
private static final Logger logger = LoggerFactory.getLogger(VintageExecutor.class);
private static final int DEFAULT_THREAD_POOL_SIZE = Runtime.getRuntime().availableProcessors();
private static final int SHUTDOWN_TIMEOUT_SECONDS = 30;
private final VintageEngineDescriptor engineDescriptor;
private final EngineExecutionListener engineExecutionListener;
private final ConfigurationParameters configurationParameters;
private final boolean parallelExecutionEnabled;
private final boolean classes;
private final boolean methods;
public VintageExecutor(VintageEngineDescriptor engineDescriptor, EngineExecutionListener engineExecutionListener,
ConfigurationParameters configurationParameters) {
this.engineDescriptor = engineDescriptor;
this.engineExecutionListener = engineExecutionListener;
this.configurationParameters = configurationParameters;
this.parallelExecutionEnabled = configurationParameters.getBoolean(Constants.PARALLEL_EXECUTION_ENABLED).orElse(
false);
this.classes = configurationParameters.getBoolean(Constants.PARALLEL_CLASS_EXECUTION).orElse(false);
this.methods = configurationParameters.getBoolean(Constants.PARALLEL_METHOD_EXECUTION).orElse(false);
}
public void executeAllChildren(CancellationToken cancellationToken) {
if (!parallelExecutionEnabled) {
executeClassesAndMethodsSequentially(cancellationToken);
return;
}
if (!classes && !methods) {
logger.warn(() -> "Parallel execution is enabled but no scope is defined. "
+ "Falling back to sequential execution.");
executeClassesAndMethodsSequentially(cancellationToken);
return;
}
boolean wasInterrupted = executeInParallel(cancellationToken);
if (wasInterrupted) {
Thread.currentThread().interrupt();
}
}
private void executeClassesAndMethodsSequentially(CancellationToken cancellationToken) {
RunnerExecutor runnerExecutor = new RunnerExecutor(engineExecutionListener, cancellationToken);
for (Iterator<TestDescriptor> iterator = engineDescriptor.getModifiableChildren().iterator(); iterator.hasNext();) {
runnerExecutor.execute((RunnerTestDescriptor) iterator.next());
iterator.remove();
}
}
private boolean executeInParallel(CancellationToken cancellationToken) {
ExecutorService executorService = Executors.newWorkStealingPool(getThreadPoolSize());
RunnerExecutor runnerExecutor = new RunnerExecutor(engineExecutionListener, cancellationToken);
List<RunnerTestDescriptor> runnerTestDescriptors = collectRunnerTestDescriptors(executorService);
if (!classes) {
executeClassesSequentially(runnerTestDescriptors, runnerExecutor);
return false;
}
return executeClassesInParallel(runnerTestDescriptors, runnerExecutor, executorService);
}
private int getThreadPoolSize() {
Optional<String> optionalPoolSize = configurationParameters.get(Constants.PARALLEL_POOL_SIZE);
if (optionalPoolSize.isPresent()) {
try {
int poolSize = Integer.parseInt(optionalPoolSize.get());
if (poolSize > 0) {
return poolSize;
}
logger.warn(() -> "Invalid value for parallel pool size: " + poolSize);
}
catch (NumberFormatException e) {
logger.warn(() -> "Invalid value for parallel pool size: " + optionalPoolSize.get());
}
}
return DEFAULT_THREAD_POOL_SIZE;
}
private List<RunnerTestDescriptor> collectRunnerTestDescriptors(ExecutorService executorService) {
return engineDescriptor.getModifiableChildren().stream() //
.map(RunnerTestDescriptor.class::cast) //
.map(it -> methods ? parallelMethodExecutor(it, executorService) : it) //
.toList();
}
private RunnerTestDescriptor parallelMethodExecutor(RunnerTestDescriptor runnerTestDescriptor,
ExecutorService executorService) {
runnerTestDescriptor.setExecutorService(executorService);
return runnerTestDescriptor;
}
private void executeClassesSequentially(List<RunnerTestDescriptor> runnerTestDescriptors,
RunnerExecutor runnerExecutor) {
for (RunnerTestDescriptor runnerTestDescriptor : runnerTestDescriptors) {
runnerExecutor.execute(runnerTestDescriptor);
}
}
private boolean executeClassesInParallel(List<RunnerTestDescriptor> runnerTestDescriptors,
RunnerExecutor runnerExecutor, ExecutorService executorService) {
List<CompletableFuture<Void>> futures = new ArrayList<>();
for (RunnerTestDescriptor runnerTestDescriptor : runnerTestDescriptors) {
CompletableFuture<Void> future = CompletableFuture.runAsync(
() -> runnerExecutor.execute(runnerTestDescriptor), executorService);
futures.add(future);
}
CompletableFuture<Void> allOf = CompletableFuture.allOf(futures.toArray(new CompletableFuture<?>[0]));
boolean wasInterrupted = false;
try {
allOf.get();
}
catch (InterruptedException e) {
logger.warn(e, () -> "Interruption while waiting for parallel test execution to finish");
wasInterrupted = true;
}
catch (ExecutionException e) {
throw ExceptionUtils.throwAsUncheckedException(requireNonNullElse(e.getCause(), e));
}
finally {
shutdownExecutorService(executorService);
}
return wasInterrupted;
}
private void shutdownExecutorService(ExecutorService executorService) {
try {
executorService.shutdown();
if (!executorService.awaitTermination(SHUTDOWN_TIMEOUT_SECONDS, TimeUnit.SECONDS)) {
logger.warn(() -> "Executor service did not terminate within the specified timeout");
executorService.shutdownNow();
}
}
catch (InterruptedException e) {
logger.warn(e, () -> "Interruption while waiting for executor service to shut down");
Thread.currentThread().interrupt();
}
}
}
| VintageExecutor |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java | {
"start": 1018,
"end": 1286
} | class ____<E extends Enum<E> & HttpOpParam.Op>
extends EnumParam<E> {
/** Parameter name. */
public static final String NAME = "op";
/** Default parameter value. */
public static final String DEFAULT = NULL;
/** Http operation types */
public | HttpOpParam |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/TypeConverterDelegate.java | {
"start": 1933,
"end": 9144
} | class ____ {
private static final Log logger = LogFactory.getLog(TypeConverterDelegate.class);
private final PropertyEditorRegistrySupport propertyEditorRegistry;
private final @Nullable Object targetObject;
/**
* Create a new TypeConverterDelegate for the given editor registry.
* @param propertyEditorRegistry the editor registry to use
*/
public TypeConverterDelegate(PropertyEditorRegistrySupport propertyEditorRegistry) {
this(propertyEditorRegistry, null);
}
/**
* Create a new TypeConverterDelegate for the given editor registry and bean instance.
* @param propertyEditorRegistry the editor registry to use
* @param targetObject the target object to work on (as context that can be passed to editors)
*/
public TypeConverterDelegate(PropertyEditorRegistrySupport propertyEditorRegistry, @Nullable Object targetObject) {
this.propertyEditorRegistry = propertyEditorRegistry;
this.targetObject = targetObject;
}
/**
* Convert the value to the required type for the specified property.
* @param propertyName name of the property
* @param oldValue the previous value, if available (may be {@code null})
* @param newValue the proposed new value
* @param requiredType the type we must convert to
* (or {@code null} if not known, for example in case of a collection element)
* @return the new value, possibly the result of type conversion
* @throws IllegalArgumentException if type conversion failed
*/
public <T> @Nullable T convertIfNecessary(@Nullable String propertyName, @Nullable Object oldValue,
Object newValue, @Nullable Class<T> requiredType) throws IllegalArgumentException {
return convertIfNecessary(propertyName, oldValue, newValue, requiredType, TypeDescriptor.valueOf(requiredType));
}
/**
* Convert the value to the required type (if necessary from a String),
* for the specified property.
* @param propertyName name of the property
* @param oldValue the previous value, if available (may be {@code null})
* @param newValue the proposed new value
* @param requiredType the type we must convert to
* (or {@code null} if not known, for example in case of a collection element)
* @param typeDescriptor the descriptor for the target property or field
* @return the new value, possibly the result of type conversion
* @throws IllegalArgumentException if type conversion failed
*/
@SuppressWarnings("unchecked")
public <T> @Nullable T convertIfNecessary(@Nullable String propertyName, @Nullable Object oldValue, @Nullable Object newValue,
@Nullable Class<T> requiredType, @Nullable TypeDescriptor typeDescriptor) throws IllegalArgumentException {
// Custom editor for this type?
PropertyEditor editor = this.propertyEditorRegistry.findCustomEditor(requiredType, propertyName);
ConversionFailedException conversionAttemptEx = null;
// No custom editor but custom ConversionService specified?
ConversionService conversionService = this.propertyEditorRegistry.getConversionService();
if (editor == null && conversionService != null && newValue != null && typeDescriptor != null) {
TypeDescriptor sourceTypeDesc = TypeDescriptor.forObject(newValue);
if (conversionService.canConvert(sourceTypeDesc, typeDescriptor)) {
try {
return (T) conversionService.convert(newValue, sourceTypeDesc, typeDescriptor);
}
catch (ConversionFailedException ex) {
// fallback to default conversion logic below
conversionAttemptEx = ex;
}
}
}
Object convertedValue = newValue;
// Value not of required type?
if (editor != null || (requiredType != null && !ClassUtils.isAssignableValue(requiredType, convertedValue))) {
if (typeDescriptor != null && requiredType != null && Collection.class.isAssignableFrom(requiredType)) {
TypeDescriptor elementTypeDesc = typeDescriptor.getElementTypeDescriptor();
if (elementTypeDesc != null) {
Class<?> elementType = elementTypeDesc.getType();
if (convertedValue instanceof String text) {
if (Class.class == elementType || Enum.class.isAssignableFrom(elementType)) {
convertedValue = StringUtils.commaDelimitedListToStringArray(text);
}
if (editor == null && String.class != elementType) {
editor = findDefaultEditor(elementType.arrayType());
}
}
}
}
if (editor == null) {
editor = findDefaultEditor(requiredType);
}
convertedValue = doConvertValue(oldValue, convertedValue, requiredType, editor);
}
boolean standardConversion = false;
if (requiredType != null) {
// Try to apply some standard type conversion rules if appropriate.
if (convertedValue != null) {
if (Object.class == requiredType) {
return (T) convertedValue;
}
else if (requiredType.isArray()) {
// Array required -> apply appropriate conversion of elements.
if (convertedValue instanceof String text &&
Enum.class.isAssignableFrom(requiredType.componentType())) {
convertedValue = StringUtils.commaDelimitedListToStringArray(text);
}
return (T) convertToTypedArray(convertedValue, propertyName, requiredType.componentType());
}
else if (convertedValue.getClass().isArray()) {
if (Collection.class.isAssignableFrom(requiredType)) {
convertedValue = convertToTypedCollection(CollectionUtils.arrayToList(convertedValue),
propertyName, requiredType, typeDescriptor);
standardConversion = true;
}
else if (Array.getLength(convertedValue) == 1) {
convertedValue = Array.get(convertedValue, 0);
standardConversion = true;
}
}
else if (convertedValue instanceof Collection<?> coll) {
// Convert elements to target type, if determined.
convertedValue = convertToTypedCollection(coll, propertyName, requiredType, typeDescriptor);
standardConversion = true;
}
else if (convertedValue instanceof Map<?, ?> map) {
// Convert keys and values to respective target type, if determined.
convertedValue = convertToTypedMap(map, propertyName, requiredType, typeDescriptor);
standardConversion = true;
}
if (String.class == requiredType && ClassUtils.isPrimitiveOrWrapper(convertedValue.getClass())) {
// We can stringify any primitive value...
return (T) convertedValue.toString();
}
else if (convertedValue instanceof String text && !requiredType.isInstance(convertedValue)) {
if (conversionAttemptEx == null && !requiredType.isInterface() && !requiredType.isEnum()) {
try {
Constructor<T> strCtor = requiredType.getConstructor(String.class);
return BeanUtils.instantiateClass(strCtor, convertedValue);
}
catch (NoSuchMethodException ex) {
// proceed with field lookup
if (logger.isTraceEnabled()) {
logger.trace("No String constructor found on type [" + requiredType.getName() + "]", ex);
}
}
catch (Exception ex) {
if (logger.isDebugEnabled()) {
logger.debug("Construction via String failed for type [" + requiredType.getName() + "]", ex);
}
}
}
String trimmedValue = text.trim();
if (requiredType.isEnum() && trimmedValue.isEmpty()) {
// It's an empty | TypeConverterDelegate |
java | grpc__grpc-java | benchmarks/src/main/java/io/grpc/benchmarks/qps/AbstractConfigurationBuilder.java | {
"start": 986,
"end": 1052
} | class ____ all {@link Configuration.Builder}s.
*/
public abstract | for |
java | apache__camel | components/camel-undertow/src/generated/java/org/apache/camel/component/undertow/UndertowEndpointUriFactory.java | {
"start": 518,
"end": 3304
} | class ____ extends org.apache.camel.support.component.EndpointUriFactorySupport implements EndpointUriFactory {
private static final String BASE = ":httpURI";
private static final Set<String> PROPERTY_NAMES;
private static final Set<String> SECRET_PROPERTY_NAMES;
private static final Map<String, String> MULTI_VALUE_PREFIXES;
static {
Set<String> props = new HashSet<>(30);
props.add("accessLog");
props.add("accessLogReceiver");
props.add("allowedRoles");
props.add("bridgeErrorHandler");
props.add("cookieHandler");
props.add("exceptionHandler");
props.add("exchangePattern");
props.add("fireWebSocketChannelEvents");
props.add("handlers");
props.add("headerFilterStrategy");
props.add("httpMethodRestrict");
props.add("httpURI");
props.add("keepAlive");
props.add("lazyStartProducer");
props.add("matchOnUriPrefix");
props.add("muteException");
props.add("options");
props.add("optionsEnabled");
props.add("preserveHostHeader");
props.add("reuseAddresses");
props.add("securityConfiguration");
props.add("securityProvider");
props.add("sendTimeout");
props.add("sendToAll");
props.add("sslContextParameters");
props.add("tcpNoDelay");
props.add("throwExceptionOnFailure");
props.add("transferException");
props.add("undertowHttpBinding");
props.add("useStreaming");
PROPERTY_NAMES = Collections.unmodifiableSet(props);
SECRET_PROPERTY_NAMES = Collections.emptySet();
Map<String, String> prefixes = new HashMap<>(1);
prefixes.put("options", "option.");
MULTI_VALUE_PREFIXES = Collections.unmodifiableMap(prefixes);
}
@Override
public boolean isEnabled(String scheme) {
return "undertow".equals(scheme);
}
@Override
public String buildUri(String scheme, Map<String, Object> properties, boolean encode) throws URISyntaxException {
String syntax = scheme + BASE;
String uri = syntax;
Map<String, Object> copy = new HashMap<>(properties);
uri = buildPathParameter(syntax, uri, "httpURI", null, true, copy);
uri = buildQueryParameters(uri, copy, encode);
return uri;
}
@Override
public Set<String> propertyNames() {
return PROPERTY_NAMES;
}
@Override
public Set<String> secretPropertyNames() {
return SECRET_PROPERTY_NAMES;
}
@Override
public Map<String, String> multiValuePrefixes() {
return MULTI_VALUE_PREFIXES;
}
@Override
public boolean isLenientProperties() {
return true;
}
}
| UndertowEndpointUriFactory |
java | apache__kafka | streams/src/test/java/org/apache/kafka/streams/state/internals/WindowKeySchemaTest.java | {
"start": 4395,
"end": 7054
} | interface ____<A, B, C, R> {
R apply(A a, B b, C c);
}
private static final Map<SchemaType, TriFunction<byte[], Long, Integer, Bytes>> BYTES_TO_STORE_BINARY_MAP = mkMap(
mkEntry(SchemaType.WindowKeySchema, WindowKeySchema::toStoreKeyBinary),
mkEntry(SchemaType.PrefixedKeyFirstSchema, KeyFirstWindowKeySchema::toStoreKeyBinary),
mkEntry(SchemaType.PrefixedTimeFirstSchema, TimeFirstWindowKeySchema::toStoreKeyBinary)
);
private static final Map<SchemaType, TriFunction<Windowed<String>, Integer, StateSerdes<String, byte[]>, Bytes>> SERDE_TO_STORE_BINARY_MAP = mkMap(
mkEntry(SchemaType.WindowKeySchema, WindowKeySchema::toStoreKeyBinary),
mkEntry(SchemaType.PrefixedKeyFirstSchema, KeyFirstWindowKeySchema::toStoreKeyBinary),
mkEntry(SchemaType.PrefixedTimeFirstSchema, TimeFirstWindowKeySchema::toStoreKeyBinary)
);
private static final Map<SchemaType, Function<byte[], Long>> EXTRACT_TS_MAP = mkMap(
mkEntry(SchemaType.WindowKeySchema, WindowKeySchema::extractStoreTimestamp),
mkEntry(SchemaType.PrefixedKeyFirstSchema, KeyFirstWindowKeySchema::extractStoreTimestamp),
mkEntry(SchemaType.PrefixedTimeFirstSchema, TimeFirstWindowKeySchema::extractStoreTimestamp)
);
private static final Map<SchemaType, Function<byte[], Integer>> EXTRACT_SEQ_MAP = mkMap(
mkEntry(SchemaType.WindowKeySchema, WindowKeySchema::extractStoreSequence),
mkEntry(SchemaType.PrefixedKeyFirstSchema, KeyFirstWindowKeySchema::extractStoreSequence),
mkEntry(SchemaType.PrefixedTimeFirstSchema, TimeFirstWindowKeySchema::extractStoreSequence)
);
private static final Map<SchemaType, Function<byte[], byte[]>> FROM_WINDOW_KEY_MAP = mkMap(
mkEntry(SchemaType.PrefixedKeyFirstSchema, KeyFirstWindowKeySchema::fromNonPrefixWindowKey),
mkEntry(SchemaType.PrefixedTimeFirstSchema, TimeFirstWindowKeySchema::fromNonPrefixWindowKey)
);
private final String key = "key";
private final String topic = "topic";
private final long startTime = 50L;
private final long endTime = 100L;
private final Serde<String> serde = Serdes.String();
private final Window window = new TimeWindow(startTime, endTime);
private final Windowed<String> windowedKey = new Windowed<>(key, window);
private KeySchema keySchema;
private final Serde<Windowed<String>> keySerde = new WindowedSerdes.TimeWindowedSerde<>(serde, endTime - startTime);
private final StateSerdes<String, byte[]> stateSerdes = new StateSerdes<>("dummy", serde, Serdes.ByteArray());
public SchemaType schemaType;
private | TriFunction |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/MonoDematerialize.java | {
"start": 798,
"end": 1272
} | class ____<T> extends InternalMonoOperator<Signal<T>, T> {
MonoDematerialize(Mono<Signal<T>> source) {
super(source);
}
@Override
public CoreSubscriber<? super Signal<T>> subscribeOrReturn(CoreSubscriber<? super T> actual) {
return new FluxDematerialize.DematerializeSubscriber<>(actual, true);
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
return super.scanUnsafe(key);
}
}
| MonoDematerialize |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/checkpointing/CheckpointFailureManagerITCase.java | {
"start": 12156,
"end": 12600
} | class ____
implements CheckpointStorageFactory<FailingFinalizationCheckpointStorage> {
@Override
public FailingFinalizationCheckpointStorage createFromConfig(
ReadableConfig config, ClassLoader classLoader)
throws IllegalConfigurationException {
return new FailingFinalizationCheckpointStorage();
}
}
private static | FailingFinalizationCheckpointStorageFactory |
java | apache__camel | dsl/camel-endpointdsl/src/test/java/org/apache/camel/builder/endpoint/QuartzTest.java | {
"start": 1255,
"end": 2601
} | class ____ extends BaseEndpointDslTest {
@Override
public boolean isUseRouteBuilder() {
return false;
}
@Test
public void testQuartz() throws Exception {
context.start();
context.addRoutes(new EndpointRouteBuilder() {
@Override
public void configure() throws Exception {
QuartzEndpointBuilderFactory.QuartzEndpointBuilder builder = quartz("myGroup/myTrigger").cron("0/1+*+*+*+*+?");
Endpoint endpoint = builder.resolve(context);
assertNotNull(endpoint);
QuartzEndpoint qe = assertIsInstanceOf(QuartzEndpoint.class, endpoint);
assertEquals("0/1 * * * * ?", qe.getCron());
assertEquals("myGroup", qe.getGroupName());
assertEquals("myTrigger", qe.getTriggerName());
builder = quartz("myGroup2/myTrigger2").cron("0/2 * * * * ?");
endpoint = builder.resolve(context);
assertNotNull(endpoint);
qe = assertIsInstanceOf(QuartzEndpoint.class, endpoint);
assertEquals("0/2 * * * * ?", qe.getCron());
assertEquals("myGroup2", qe.getGroupName());
assertEquals("myTrigger2", qe.getTriggerName());
}
});
context.stop();
}
}
| QuartzTest |
java | google__dagger | javatests/dagger/functional/names/ComponentFactoryNameConflictsTest.java | {
"start": 1515,
"end": 1844
} | interface ____ {
CreateUsage getCreateUsage();
}
@Test
public void testCreate() {
CreateComponent testComponent =
DaggerComponentFactoryNameConflictsTest_CreateComponent.create();
CreateUsage createUsage = testComponent.getCreateUsage();
assertThat(createUsage).isNotNull();
}
// A | CreateComponent |
java | apache__maven | impl/maven-di/src/test/java/org/apache/maven/di/impl/InjectorImplTest.java | {
"start": 11646,
"end": 11694
} | class ____ {
@Named
| NullableOnField |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_2101/Issue2101AdditionalMapper.java | {
"start": 1119,
"end": 1240
} | class ____ {
public String nestedValue1;
public String nestedValue2;
}
//CHECKSTYLE:ON
}
| NestedSource |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/nullness/FieldMissingNullableTest.java | {
"start": 18270,
"end": 18401
} | class ____ {}
}
""")
.addOutputLines(
"out/Test.java",
"""
| Nullable |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/identity/IdentityColumnSupport.java | {
"start": 493,
"end": 4128
} | interface ____ {
/**
* Does this dialect support identity column key generation?
*
* @return True if IDENTITY columns are supported; false otherwise.
*/
boolean supportsIdentityColumns();
/**
* Does the dialect support some form of inserting and selecting
* the generated IDENTITY value all in the same statement.
*
* @return True if the dialect supports selecting the just
* generated IDENTITY in the insert statement.
*/
boolean supportsInsertSelectIdentity();
/**
* Whether this dialect have an Identity clause added to the data type or a
* completely separate identity data type
*
* @return boolean
*/
boolean hasDataTypeInIdentityColumn();
/**
* Provided we {@link #supportsInsertSelectIdentity}, then attach the
* "select identity" clause to the insert statement.
* <p>
* Note, if {@link #supportsInsertSelectIdentity} == false then
* the insert-string should be returned without modification.
*
* @param insertString The insert command
*
* @return The insert command with any necessary identity select
* clause attached.
*
* @deprecated Use {@link #appendIdentitySelectToInsert(String, String)} instead.
*/
@Deprecated( forRemoval = true, since = "6.5" )
String appendIdentitySelectToInsert(String insertString);
/**
* Provided we {@link #supportsInsertSelectIdentity}, then attach the
* "select identity" clause to the insert statement.
* <p>
* Note, if {@link #supportsInsertSelectIdentity} == false then
* the insert-string should be returned without modification.
*
* @param identityColumnName The name of the identity column
* @param insertString The insert command
*
* @return The insert command with any necessary identity select
* clause attached.
*/
default String appendIdentitySelectToInsert(String identityColumnName, String insertString) {
return appendIdentitySelectToInsert( insertString );
}
/**
* Get the select command to use to retrieve the last generated IDENTITY
* value for a particular table
*
* @param table The table into which the insert was done
* @param column The PK column.
* @param type The {@link java.sql.Types} type code.
*
* @return The appropriate select command
*
* @throws MappingException If IDENTITY generation is not supported.
*/
String getIdentitySelectString(String table, String column, int type) throws MappingException;
/**
* The syntax used during DDL to define a column as being an IDENTITY of
* a particular type.
*
* @param type The {@link java.sql.Types} type code.
*
* @return The appropriate DDL fragment.
*
* @throws MappingException If IDENTITY generation is not supported.
*/
String getIdentityColumnString(int type) throws MappingException;
/**
* The keyword used to insert a generated value into an identity column (or null).
* Need if the dialect does not support inserts that specify no column values.
*
* @return The appropriate keyword.
*/
String getIdentityInsertString();
/**
* Is there a keyword used to insert a generated value into an identity column.
*
* @return {@code true} if the dialect does not support inserts that specify no column values.
*/
default boolean hasIdentityInsertKeyword() {
return getIdentityInsertString() != null;
}
/**
* The delegate for dealing with {@code IDENTITY} columns using
* {@link java.sql.PreparedStatement#getGeneratedKeys}.
*
* @param persister The persister
*
* @return the dialect-specific {@link GetGeneratedKeysDelegate}
*/
GetGeneratedKeysDelegate buildGetGeneratedKeysDelegate(EntityPersister persister);
}
| IdentityColumnSupport |
java | spring-projects__spring-framework | spring-context-support/src/main/java/org/springframework/ui/freemarker/FreeMarkerTemplateUtils.java | {
"start": 1018,
"end": 1930
} | class ____ {
/**
* Process the specified FreeMarker template with the given model and write
* the result to a String.
* <p>When using this method to prepare text for a mail to be sent with Spring's
* mail support, consider wrapping IO/TemplateException in MailPreparationException.
* @param model the model object, typically a Map that contains model names
* as keys and model objects as values
* @return the result as a String
* @throws IOException if the template wasn't found or couldn't be read
* @throws freemarker.template.TemplateException if rendering failed
* @see org.springframework.mail.MailPreparationException
*/
public static String processTemplateIntoString(Template template, Object model)
throws IOException, TemplateException {
StringWriter result = new StringWriter(1024);
template.process(model, result);
return result.toString();
}
}
| FreeMarkerTemplateUtils |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiEmbeddingsRequestManager.java | {
"start": 1535,
"end": 3505
} | class ____ extends AzureOpenAiRequestManager {
private static final Logger logger = LogManager.getLogger(AzureOpenAiEmbeddingsRequestManager.class);
private static final ResponseHandler HANDLER = createEmbeddingsHandler();
private static ResponseHandler createEmbeddingsHandler() {
return new AzureOpenAiResponseHandler("azure openai text embedding", OpenAiEmbeddingsResponseEntity::fromResponse, false);
}
public static AzureOpenAiEmbeddingsRequestManager of(AzureOpenAiEmbeddingsModel model, Truncator truncator, ThreadPool threadPool) {
return new AzureOpenAiEmbeddingsRequestManager(
Objects.requireNonNull(model),
Objects.requireNonNull(truncator),
Objects.requireNonNull(threadPool)
);
}
private final Truncator truncator;
private final AzureOpenAiEmbeddingsModel model;
public AzureOpenAiEmbeddingsRequestManager(AzureOpenAiEmbeddingsModel model, Truncator truncator, ThreadPool threadPool) {
super(threadPool, model);
this.model = Objects.requireNonNull(model);
this.truncator = Objects.requireNonNull(truncator);
}
@Override
public void execute(
InferenceInputs inferenceInputs,
RequestSender requestSender,
Supplier<Boolean> hasRequestCompletedFunction,
ActionListener<InferenceServiceResults> listener
) {
EmbeddingsInput input = inferenceInputs.castTo(EmbeddingsInput.class);
List<String> docsInput = input.getTextInputs();
InputType inputType = input.getInputType();
var truncatedInput = truncate(docsInput, model.getServiceSettings().maxInputTokens());
AzureOpenAiEmbeddingsRequest request = new AzureOpenAiEmbeddingsRequest(truncator, truncatedInput, inputType, model);
execute(new ExecutableInferenceRequest(requestSender, logger, request, HANDLER, hasRequestCompletedFunction, listener));
}
}
| AzureOpenAiEmbeddingsRequestManager |
java | quarkusio__quarkus | extensions/oidc-token-propagation-common/deployment/src/main/java/io/quarkus/oidc/token/propagation/common/deployment/AccessTokenInstanceBuildItem.java | {
"start": 337,
"end": 1543
} | class ____ extends MultiBuildItem {
private final String clientName;
private final boolean tokenExchange;
private final AnnotationTarget annotationTarget;
private final MethodInfo targetMethodInfo;
AccessTokenInstanceBuildItem(String clientName, Boolean tokenExchange, AnnotationTarget annotationTarget,
MethodInfo targetMethodInfo) {
this.clientName = Objects.requireNonNull(clientName);
this.tokenExchange = tokenExchange;
this.annotationTarget = Objects.requireNonNull(annotationTarget);
this.targetMethodInfo = targetMethodInfo;
}
String getClientName() {
return clientName;
}
boolean exchangeTokenActivated() {
return tokenExchange;
}
public AnnotationTarget getAnnotationTarget() {
return annotationTarget;
}
public String targetClass() {
if (annotationTarget.kind() == AnnotationTarget.Kind.CLASS) {
return annotationTarget.asClass().name().toString();
}
return annotationTarget.asMethod().declaringClass().name().toString();
}
MethodInfo getTargetMethodInfo() {
return targetMethodInfo;
}
}
| AccessTokenInstanceBuildItem |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-jsonb/deployment/src/test/java/io/quarkus/resteasy/reactive/jsonb/deployment/test/SimpleJsonResource.java | {
"start": 623,
"end": 5434
} | class ____ extends SuperClass<Person> {
@GET
@Path("/person")
public Person getPerson() {
Person person = new Person();
person.setFirst("Bob");
person.setLast("Builder");
return person;
}
@POST
@Path("/person")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Person getPerson(Person person) {
if (BlockingOperationControl.isBlockingAllowed()) {
throw new RuntimeException("should not have dispatched");
}
return person;
}
@POST
@Path("/person-custom-mt")
@Produces("application/vnd.quarkus.person-v1+json")
@Consumes("application/vnd.quarkus.person-v1+json")
public Person getPersonCustomMediaType(Person person) {
if (BlockingOperationControl.isBlockingAllowed()) {
throw new RuntimeException("should not have dispatched");
}
return person;
}
@POST
@Path("/person-custom-mt-response")
@Produces("application/vnd.quarkus.person-v1+json")
@Consumes("application/vnd.quarkus.person-v1+json")
public Response getPersonCustomMediaTypeResponse(Person person) {
if (BlockingOperationControl.isBlockingAllowed()) {
throw new RuntimeException("should not have dispatched");
}
return Response.ok(person).status(201).build();
}
@POST
@Path("/person-custom-mt-response-with-type")
@Produces("application/vnd.quarkus.person-v1+json")
@Consumes("application/vnd.quarkus.person-v1+json")
public Response getPersonCustomMediaTypeResponseWithType(Person person) {
if (BlockingOperationControl.isBlockingAllowed()) {
throw new RuntimeException("should not have dispatched");
}
return Response.ok(person).status(201).header("Content-Type", "application/vnd.quarkus.other-v1+json").build();
}
@POST
@Path("/people")
@Consumes(MediaType.APPLICATION_JSON)
public List<Person> getPeople(List<Person> people) {
if (BlockingOperationControl.isBlockingAllowed()) {
throw new RuntimeException("should not have dispatched");
}
List<Person> reversed = new ArrayList<>(people.size());
for (Person person : people) {
reversed.add(0, person);
}
return reversed;
}
@POST
@Path("/strings")
public List<String> strings(List<String> strings) {
if (BlockingOperationControl.isBlockingAllowed()) {
throw new RuntimeException("should not have dispatched");
}
return strings;
}
@POST
@Path("/person-large")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Person personTest(Person person) {
//large requests should get bumped from the IO thread
if (BlockingOperationControl.isBlockingAllowed()) {
throw new RuntimeException("should have dispatched back to event loop");
}
return person;
}
@POST
@Path("/person-validated")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Person getValidatedPerson(@Valid Person person) {
return person;
}
@POST
@Path("/person-invalid-result")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
@Valid
public Person getInvalidPersonResult(@Valid Person person) {
person.setLast(null);
return person;
}
@GET
@Path("/async-person")
@Produces(MediaType.APPLICATION_JSON)
public void getPerson(@Suspended AsyncResponse response) {
new Thread(new Runnable() {
@Override
public void run() {
Person person = new Person();
person.setFirst("Bob");
person.setLast("Builder");
response.resume(person);
}
}).start();
}
@GET
@Path("/multi1")
public Multi<Person> getMulti1() {
Person person = new Person();
person.setFirst("Bob");
person.setLast("Builder");
return Multi.createFrom().items(person);
}
@GET
@Path("/multi2")
public Multi<Person> getMulti2() {
Person person = new Person();
person.setFirst("Bob");
person.setLast("Builder");
Person person2 = new Person();
person2.setFirst("Bob2");
person2.setLast("Builder2");
return Multi.createFrom().items(person, person2);
}
@GET
@Path("/multi0")
public Multi<Person> getMulti0() {
return Multi.createFrom().empty();
}
@POST
@Path("/genericInput")
public String genericInputTest(DataItem<Item> item) {
return item.getContent().getName();
}
}
| SimpleJsonResource |
java | apache__camel | components/camel-zendesk/src/main/java/org/apache/camel/component/zendesk/ZendeskEndpoint.java | {
"start": 2072,
"end": 4939
} | class ____ extends AbstractApiEndpoint<ZendeskApiName, ZendeskConfiguration>
implements EndpointServiceLocation {
@UriParam
private ZendeskConfiguration configuration;
private Zendesk apiProxy;
public ZendeskEndpoint(String uri, ZendeskComponent component, ZendeskApiName apiName, String methodName,
ZendeskConfiguration endpointConfiguration) {
super(uri, component, apiName, methodName, ZendeskApiCollection.getCollection().getHelper(apiName),
endpointConfiguration);
this.configuration = endpointConfiguration;
}
@Override
public String getServiceUrl() {
return configuration.getServerUrl();
}
@Override
public String getServiceProtocol() {
return "rest";
}
@Override
public Map<String, String> getServiceMetadata() {
if (configuration.getUsername() != null) {
return Map.of("username", configuration.getUsername());
}
return null;
}
@Override
public ZendeskComponent getComponent() {
return (ZendeskComponent) super.getComponent();
}
@Override
public Producer createProducer() throws Exception {
return new ZendeskProducer(this);
}
@Override
public Consumer createConsumer(Processor processor) throws Exception {
// make sure inBody is not set for consumers
if (inBody != null) {
throw new IllegalArgumentException("Option inBody is not supported for consumer endpoint");
}
final ZendeskConsumer consumer = new ZendeskConsumer(this, processor);
// also set consumer.* properties
configureConsumer(consumer);
return consumer;
}
@Override
protected void doStart() throws Exception {
super.doStart();
// verify configuration is valid
getZendesk();
}
@Override
public void doStop() throws Exception {
IOHelper.close(apiProxy);
super.doStop();
}
@Override
protected ApiMethodPropertiesHelper<ZendeskConfiguration> getPropertiesHelper() {
return ZendeskPropertiesHelper.getHelper(getCamelContext());
}
@Override
protected String getThreadProfileName() {
return ZendeskConstants.THREAD_PROFILE_NAME;
}
@Override
protected void afterConfigureProperties() {
}
@Override
public Object getApiProxy(ApiMethod method, Map<String, Object> args) {
return getZendesk();
}
private Zendesk getZendesk() {
if (apiProxy == null) {
if (getConfiguration().equals(getComponent().getConfiguration())) {
apiProxy = getComponent().getZendesk();
} else {
apiProxy = ZendeskHelper.create(getConfiguration());
}
}
return apiProxy;
}
}
| ZendeskEndpoint |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/embedded/one2many/Name.java | {
"start": 374,
"end": 761
} | class ____ {
private String first;
private String last;
public Name() {
}
public Name(String first, String last) {
this.first = first;
this.last = last;
}
public String getFirst() {
return first;
}
public void setFirst(String first) {
this.first = first;
}
public String getLast() {
return last;
}
public void setLast(String last) {
this.last = last;
}
}
| Name |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/ClassUtils.java | {
"start": 33631,
"end": 34566
} | class ____ unpackaged, return an empty string.
* </p>
*
* @param className the className to get the package name for, may be {@code null}.
* @return the package name or an empty string.
*/
public static String getPackageName(String className) {
if (StringUtils.isEmpty(className)) {
return StringUtils.EMPTY;
}
int i = 0;
// Strip array encoding
while (className.charAt(i) == '[') {
i++;
}
className = className.substring(i);
// Strip Object type encoding
if (className.charAt(0) == 'L' && className.charAt(className.length() - 1) == ';') {
className = className.substring(1);
}
i = className.lastIndexOf(PACKAGE_SEPARATOR_CHAR);
if (i == -1) {
return StringUtils.EMPTY;
}
return className.substring(0, i);
}
/**
* Gets the primitive | is |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/indices/recovery/RecoverFilesRecoveryException.java | {
"start": 897,
"end": 2141
} | class ____ extends ElasticsearchException implements ElasticsearchWrapperException {
private final int numberOfFiles;
private final ByteSizeValue totalFilesSize;
public RecoverFilesRecoveryException(ShardId shardId, int numberOfFiles, ByteSizeValue totalFilesSize, Throwable cause) {
super("Failed to transfer [{}] files with total size of [{}]", cause, numberOfFiles, totalFilesSize);
Objects.requireNonNull(totalFilesSize, "totalFilesSize must not be null");
setShard(shardId);
this.numberOfFiles = numberOfFiles;
this.totalFilesSize = totalFilesSize;
}
public int numberOfFiles() {
return numberOfFiles;
}
public ByteSizeValue totalFilesSize() {
return totalFilesSize;
}
public RecoverFilesRecoveryException(StreamInput in) throws IOException {
super(in);
numberOfFiles = in.readInt();
totalFilesSize = ByteSizeValue.readFrom(in);
}
@Override
protected void writeTo(StreamOutput out, Writer<Throwable> nestedExceptionsWriter) throws IOException {
super.writeTo(out, nestedExceptionsWriter);
out.writeInt(numberOfFiles);
totalFilesSize.writeTo(out);
}
}
| RecoverFilesRecoveryException |
java | apache__kafka | connect/transforms/src/main/java/org/apache/kafka/connect/transforms/HoistField.java | {
"start": 1474,
"end": 3944
} | class ____<R extends ConnectRecord<R>> implements Transformation<R>, Versioned {
public static final String OVERVIEW_DOC =
"Wrap data using the specified field name in a Struct when schema present, or a Map in the case of schemaless data."
+ "<p/>Use the concrete transformation type designed for the record key (<code>" + Key.class.getName() + "</code>) "
+ "or value (<code>" + Value.class.getName() + "</code>).";
private static final String FIELD_CONFIG = "field";
public static final ConfigDef CONFIG_DEF = new ConfigDef()
.define(FIELD_CONFIG, ConfigDef.Type.STRING, ConfigDef.NO_DEFAULT_VALUE, ConfigDef.Importance.MEDIUM,
"Field name for the single field that will be created in the resulting Struct or Map.");
private Cache<Schema, Schema> schemaUpdateCache;
private String fieldName;
@Override
public void configure(Map<String, ?> props) {
final SimpleConfig config = new SimpleConfig(CONFIG_DEF, props);
fieldName = config.getString("field");
schemaUpdateCache = new SynchronizedCache<>(new LRUCache<>(16));
}
@Override
public R apply(R record) {
final Schema schema = operatingSchema(record);
final Object value = operatingValue(record);
if (schema == null) {
Map<String, Object> updatedValue = new HashMap<>();
updatedValue.put(fieldName, value);
return newRecord(record, null, updatedValue);
} else {
Schema updatedSchema = schemaUpdateCache.get(schema);
if (updatedSchema == null) {
updatedSchema = SchemaBuilder.struct().field(fieldName, schema).build();
schemaUpdateCache.put(schema, updatedSchema);
}
final Struct updatedValue = new Struct(updatedSchema).put(fieldName, value);
return newRecord(record, updatedSchema, updatedValue);
}
}
@Override
public String version() {
return AppInfoParser.getVersion();
}
@Override
public void close() {
schemaUpdateCache = null;
}
@Override
public ConfigDef config() {
return CONFIG_DEF;
}
protected abstract Schema operatingSchema(R record);
protected abstract Object operatingValue(R record);
protected abstract R newRecord(R record, Schema updatedSchema, Object updatedValue);
public static | HoistField |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/observable/ObservableMergeDelayErrorTest.java | {
"start": 19760,
"end": 23454
} | class ____ implements ObservableSource<String> {
Thread t;
@Override
public void subscribe(final Observer<? super String> observer) {
observer.onSubscribe(Disposable.empty());
t = new Thread(new Runnable() {
@Override
public void run() {
try {
Thread.sleep(100);
} catch (InterruptedException e) {
observer.onError(e);
}
observer.onNext("hello");
observer.onComplete();
}
});
t.start();
}
}
@Test
public void mergeIterableDelayError() {
Observable.mergeDelayError(Arrays.asList(Observable.just(1), Observable.just(2)))
.test()
.assertResult(1, 2);
}
@Test
public void mergeArrayDelayError() {
Observable.mergeArrayDelayError(Observable.just(1), Observable.just(2))
.test()
.assertResult(1, 2);
}
@Test
public void mergeIterableDelayErrorWithError() {
Observable.mergeDelayError(
Arrays.asList(Observable.just(1).concatWith(Observable.<Integer>error(new TestException())),
Observable.just(2)))
.test()
.assertFailure(TestException.class, 1, 2);
}
@Test
public void mergeDelayError() {
Observable.mergeDelayError(
Observable.just(Observable.just(1),
Observable.just(2)))
.test()
.assertResult(1, 2);
}
@Test
public void mergeDelayErrorWithError() {
Observable.mergeDelayError(
Observable.just(Observable.just(1).concatWith(Observable.<Integer>error(new TestException())),
Observable.just(2)))
.test()
.assertFailure(TestException.class, 1, 2);
}
@Test
public void mergeDelayErrorMaxConcurrency() {
Observable.mergeDelayError(
Observable.just(Observable.just(1),
Observable.just(2)), 1)
.test()
.assertResult(1, 2);
}
@Test
public void mergeDelayErrorWithErrorMaxConcurrency() {
Observable.mergeDelayError(
Observable.just(Observable.just(1).concatWith(Observable.<Integer>error(new TestException())),
Observable.just(2)), 1)
.test()
.assertFailure(TestException.class, 1, 2);
}
@Test
public void mergeIterableDelayErrorMaxConcurrency() {
Observable.mergeDelayError(
Arrays.asList(Observable.just(1),
Observable.just(2)), 1)
.test()
.assertResult(1, 2);
}
@Test
public void mergeIterableDelayErrorWithErrorMaxConcurrency() {
Observable.mergeDelayError(
Arrays.asList(Observable.just(1).concatWith(Observable.<Integer>error(new TestException())),
Observable.just(2)), 1)
.test()
.assertFailure(TestException.class, 1, 2);
}
@Test
public void mergeDelayError3() {
Observable.mergeDelayError(
Observable.just(1),
Observable.just(2),
Observable.just(3)
)
.test()
.assertResult(1, 2, 3);
}
@Test
public void mergeDelayError3WithError() {
Observable.mergeDelayError(
Observable.just(1),
Observable.just(2).concatWith(Observable.<Integer>error(new TestException())),
Observable.just(3)
)
.test()
.assertFailure(TestException.class, 1, 2, 3);
}
}
| TestASynchronous1sDelayedObservable |
java | bumptech__glide | annotation/compiler/src/main/java/com/bumptech/glide/annotation/compiler/ProcessorUtil.java | {
"start": 20672,
"end": 22087
} | class ____ have multiple Annotations.
if (!annotationClassName.equals(annotationMirror.getAnnotationType().toString())) {
continue;
}
var entries = annotationMirror.getElementValues().entrySet();
if (entries.size() != 1) {
throw new IllegalArgumentException("Expected single value, but found: " + entries);
}
excludedModuleAnnotationValue = entries.iterator().next().getValue();
if (excludedModuleAnnotationValue == null) {
throw new IllegalArgumentException(
"Failed to find value for: "
+ annotationClass
+ " from mirrors: "
+ clazz.getAnnotationMirrors());
}
}
if (excludedModuleAnnotationValue == null) {
return ImmutableSet.of();
}
Object value = excludedModuleAnnotationValue.getValue();
if (value instanceof List) {
LinkedHashSet<String> out = new LinkedHashSet<>();
for (Object o : (List<?>) value) {
AnnotationValue av = (AnnotationValue) o;
out.add(qualifiedNameFromTypeMirror((TypeMirror) av.getValue()));
}
return ImmutableSet.copyOf(out);
} else {
return ImmutableSet.of(qualifiedNameFromTypeMirror((TypeMirror) value));
}
}
static String qualifiedNameFromTypeMirror(TypeMirror type) {
if (type.getKind() == TypeKind.ERROR) {
throw new IllegalArgumentException("Unresolved | may |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/sql/results/graph/entity/internal/EntitySelectFetchInitializer.java | {
"start": 2240,
"end": 12114
} | class ____ extends InitializerData {
// per-row state
protected @Nullable Object entityIdentifier;
public EntitySelectFetchInitializerData(EntitySelectFetchInitializer<?> initializer, RowProcessingState rowProcessingState) {
super( rowProcessingState );
}
/*
* Used by Hibernate Reactive
*/
public EntitySelectFetchInitializerData(EntitySelectFetchInitializerData original) {
super( original );
this.entityIdentifier = original.entityIdentifier;
}
}
public EntitySelectFetchInitializer(
InitializerParent<?> parent,
ToOneAttributeMapping toOneMapping,
NavigablePath fetchedNavigable,
EntityPersister concreteDescriptor,
DomainResult<?> keyResult,
boolean affectedByFilter,
AssemblerCreationState creationState) {
super( creationState );
this.parent = parent;
this.toOneMapping = toOneMapping;
this.navigablePath = fetchedNavigable;
this.concreteDescriptor = concreteDescriptor;
this.affectedByFilter = affectedByFilter;
isPartOfKey = Initializer.isPartOfKey( fetchedNavigable, parent );
keyAssembler = keyResult.createResultAssembler( this, creationState );
isEnhancedForLazyLoading = concreteDescriptor.getBytecodeEnhancementMetadata().isEnhancedForLazyLoading();
keyIsEager = keyAssembler.isEager();
hasLazySubInitializer = keyAssembler.hasLazySubInitializers();
}
@Override
protected InitializerData createInitializerData(RowProcessingState rowProcessingState) {
return new EntitySelectFetchInitializerData( this, rowProcessingState );
}
public ModelPart getInitializedPart(){
return toOneMapping;
}
@Override
public @Nullable InitializerParent<?> getParent() {
return parent;
}
@Override
public NavigablePath getNavigablePath() {
return navigablePath;
}
@Override
public void resolveFromPreviousRow(Data data) {
if ( data.getState() == State.UNINITIALIZED ) {
if ( data.getInstance() == null ) {
data.setState( State.MISSING );
}
else {
final var initializer = keyAssembler.getInitializer();
if ( initializer != null ) {
initializer.resolveFromPreviousRow( data.getRowProcessingState() );
}
data.setState( State.INITIALIZED );
}
}
}
@Override
public void resolveInstance(Data data) {
if ( data.getState() == State.KEY_RESOLVED ) {
final var rowProcessingState = data.getRowProcessingState();
final Object identifier = keyAssembler.assemble( rowProcessingState );
data.entityIdentifier = identifier;
if ( identifier == null ) {
data.setState( State.MISSING );
data.setInstance( null );
}
else {
data.setState( State.INITIALIZED );
initialize( data );
}
}
}
@Override
public void resolveInstance(Object instance, Data data) {
if ( instance == null ) {
data.setState( State.MISSING );
data.entityIdentifier = null;
data.setInstance( null );
}
else {
final var rowProcessingState = data.getRowProcessingState();
final var session = rowProcessingState.getSession();
final var persistenceContext = session.getPersistenceContextInternal();
final var lazyInitializer = extractLazyInitializer( instance );
if ( lazyInitializer == null ) {
data.setState( State.INITIALIZED );
data.entityIdentifier = concreteDescriptor.getIdentifier( instance, session );
}
else if ( lazyInitializer.isUninitialized() ) {
data.setState( State.RESOLVED );
data.entityIdentifier = lazyInitializer.getInternalIdentifier();
}
else {
data.setState( State.INITIALIZED );
data.entityIdentifier = lazyInitializer.getInternalIdentifier();
}
final var entityKey = new EntityKey( data.entityIdentifier, concreteDescriptor );
final var entityHolder = persistenceContext.getEntityHolder(
entityKey
);
if ( entityHolder == null || entityHolder.getEntity() != instance && entityHolder.getProxy() != instance ) {
// the existing entity instance is detached or transient
if ( entityHolder != null ) {
final var managed = entityHolder.getManagedObject();
data.setInstance( managed );
data.entityIdentifier = entityHolder.getEntityKey().getIdentifier();
data.setState( entityHolder.isInitialized() ? State.INITIALIZED : State.RESOLVED );
}
else {
initialize( data, null, session, persistenceContext );
}
}
else {
data.setInstance( instance );
}
if ( keyIsEager ) {
final var initializer = keyAssembler.getInitializer();
assert initializer != null;
initializer.resolveInstance( data.entityIdentifier, rowProcessingState );
}
else if ( rowProcessingState.needsResolveState() ) {
// Resolve the state of the identifier if result caching is enabled and this is not a query cache hit
keyAssembler.resolveState( rowProcessingState );
}
}
}
@Override
public void initializeInstance(Data data) {
if ( data.getState() == State.RESOLVED ) {
data.setState( State.INITIALIZED );
Hibernate.initialize( data.getInstance() );
}
}
protected void initialize(EntitySelectFetchInitializerData data) {
final var rowProcessingState = data.getRowProcessingState();
final var session = rowProcessingState.getSession();
final var persistenceContext = session.getPersistenceContextInternal();
final EntityKey entityKey = new EntityKey( data.entityIdentifier, concreteDescriptor );
initialize( data, persistenceContext.getEntityHolder( entityKey ), session, persistenceContext );
}
protected void initialize(
EntitySelectFetchInitializerData data,
@Nullable EntityHolder holder,
SharedSessionContractImplementor session,
PersistenceContext persistenceContext) {
if ( holder != null ) {
data.setInstance( persistenceContext.proxyFor( holder, concreteDescriptor ) );
if ( holder.getEntityInitializer() == null ) {
if ( data.getInstance() != null && Hibernate.isInitialized( data.getInstance() ) ) {
data.setState( State.INITIALIZED );
return;
}
}
else if ( holder.getEntityInitializer() != this ) {
// the entity is already being loaded elsewhere in this processing level
if ( holder.getJdbcValuesProcessingState() == data.getRowProcessingState().getJdbcValuesSourceProcessingState() ) {
data.setState( State.INITIALIZED );
}
return;
}
else if ( data.getInstance() == null ) {
// todo: maybe mark this as resolved instead?
assert holder.getProxy() == null : "How to handle this case?";
data.setState( State.INITIALIZED );
return;
}
}
data.setState( State.INITIALIZED );
final String entityName = concreteDescriptor.getEntityName();
final Object instance = session.internalLoad(
entityName,
data.entityIdentifier,
true,
toOneMapping.isInternalLoadNullable()
);
data.setInstance( instance );
if ( instance == null ) {
checkNotFound( data );
persistenceContext.claimEntityHolderIfPossible(
new EntityKey( data.entityIdentifier, concreteDescriptor ),
null,
data.getRowProcessingState().getJdbcValuesSourceProcessingState(),
this
);
}
final boolean unwrapProxy = toOneMapping.isUnwrapProxy() && isEnhancedForLazyLoading;
final var lazyInitializer = extractLazyInitializer( data.getInstance() );
if ( lazyInitializer != null ) {
lazyInitializer.setUnwrap( unwrapProxy );
}
}
void checkNotFound(EntitySelectFetchInitializerData data) {
checkNotFound( toOneMapping, affectedByFilter,
concreteDescriptor.getEntityName(),
data.entityIdentifier );
}
static void checkNotFound(
ToOneAttributeMapping toOneMapping,
boolean affectedByFilter,
String entityName, Object identifier) {
final var notFoundAction = toOneMapping.getNotFoundAction();
if ( notFoundAction != NotFoundAction.IGNORE ) {
if ( affectedByFilter ) {
throw new EntityFilterException( entityName, identifier,
toOneMapping.getNavigableRole().getFullPath() );
}
if ( notFoundAction == NotFoundAction.EXCEPTION ) {
throw new FetchNotFoundException( entityName, identifier );
}
}
}
@Override
public void initializeInstanceFromParent(Object parentInstance, Data data) {
final var attributeMapping = getInitializedPart().asAttributeMapping();
final Object instance =
attributeMapping != null
? attributeMapping.getValue( parentInstance )
: parentInstance;
if ( instance == null ) {
data.setState( State.MISSING );
data.entityIdentifier = null;
data.setInstance( null );
}
else {
data.setState( State.INITIALIZED );
// No need to initialize this
data.entityIdentifier = null;
data.setInstance( instance );
Hibernate.initialize( instance );
}
}
@Override
protected void forEachSubInitializer(BiConsumer<Initializer<?>, RowProcessingState> consumer, InitializerData data) {
final var initializer = keyAssembler.getInitializer();
if ( initializer != null ) {
consumer.accept( initializer, data.getRowProcessingState() );
}
}
@Override
public EntityPersister getEntityDescriptor() {
return concreteDescriptor;
}
@Override
public EntityPersister getConcreteDescriptor(Data data) {
return concreteDescriptor;
}
@Override
public @Nullable Object getEntityIdentifier(Data data) {
return data.entityIdentifier;
}
@Override
public boolean isPartOfKey() {
return isPartOfKey;
}
@Override
public boolean isEager() {
return true;
}
@Override
public void resolveState(EntitySelectFetchInitializerData data) {
keyAssembler.resolveState( data.getRowProcessingState() );
}
@Override
public boolean hasLazySubInitializers() {
return hasLazySubInitializer;
}
@Override
public boolean isResultInitializer() {
return false;
}
@Override
public String toString() {
return "EntitySelectFetchInitializer("
+ toLoggableString( getNavigablePath() ) + ")";
}
public DomainResultAssembler<?> getKeyAssembler() {
return keyAssembler;
}
}
| EntitySelectFetchInitializerData |
java | apache__flink | flink-table/flink-table-code-splitter/src/test/resources/block/expected/TestWhileLoopRewrite.java | {
"start": 7,
"end": 1361
} | class ____ {
int counter = 0;
public void myFun(int[] a, int[] b, int[] c) {
a[0] += b[1];
b[1] += a[1];
while (counter < 10) {
myFun_0_0_rewriteGroup2(a, b, c);
myFun_0_0_rewriteGroup4(a, b, c);
counter--;
}
a[4] += b[4];
b[5] += a[5];
}
void myFun_0_0_rewriteGroup4(int[] a, int[] b, int[] c) {
myFun_0_0_4(a, b, c);
if (a[0] > 0) {
System.out.println("Hello");
} else {
System.out.println("World");
}
}
void myFun_0_0_1_3(int[] a, int[] b, int[] c) {
b[counter] = a[counter] * 3;
System.out.println(b[counter]);
}
void myFun_0_0_4(int[] a, int[] b, int[] c) {
a[2] += b[2];
b[3] += a[3];
}
void myFun_0_0_rewriteGroup2(int[] a, int[] b, int[] c) {
myFun_0_0_1(a, b, c);
if (a[counter] > 0) {
myFun_0_0_1_2(a, b, c);
} else {
myFun_0_0_1_3(a, b, c);
}
}
void myFun_0_0_1_2(int[] a, int[] b, int[] c) {
b[counter] = a[counter] * 2;
c[counter] = b[counter] * 2;
System.out.println(b[counter]);
}
void myFun_0_0_1(int[] a, int[] b, int[] c) {
c[counter] = a[0] + 1000;
System.out.println(c);
}
}
| TestWhileLoopRewrite |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/spi/MockSendToEndpointStrategyFactory.java | {
"start": 989,
"end": 1513
} | interface ____ {
String FACTORY = "mock-send-to-endpoint-strategy-factory";
/**
* Mock sending to endpoint using interceptor EIPs
*
* @param pattern pattern for intercepting (null or * = intercept all, otherwise it's a text pattern (and regexp).
* @param skip whether to skip sending to original endpoint (only to mock endpoint).
* @return the endpoint strategy that intercepts.
*/
EndpointStrategy mock(String pattern, boolean skip);
}
| MockSendToEndpointStrategyFactory |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java | {
"start": 3943,
"end": 62347
} | class ____ extends Configured implements Tool {
private static final String CONTAINER_ID_OPTION = "containerId";
private static final String APPLICATION_ID_OPTION = "applicationId";
private static final String APPLICATION_ATTEMPT_ID_OPTION =
"applicationAttemptId";
private static final String CLUSTER_ID_OPTION = "clusterId";
private static final String NODE_ADDRESS_OPTION = "nodeAddress";
private static final String APP_OWNER_OPTION = "appOwner";
private static final String AM_CONTAINER_OPTION = "am";
private static final String PER_CONTAINER_LOG_FILES_OPTION = "log_files";
private static final String PER_CONTAINER_LOG_FILES_OLD_OPTION = "logFiles";
private static final String PER_CONTAINER_LOG_FILES_REGEX_OPTION
= "log_files_pattern";
private static final String LIST_NODES_OPTION = "list_nodes";
private static final String SHOW_APPLICATION_LOG_INFO
= "show_application_log_info";
private static final String SHOW_CONTAINER_LOG_INFO
= "show_container_log_info";
private static final String OUT_OPTION = "out";
private static final String SIZE_OPTION = "size";
private static final String CLIENT_MAX_RETRY_OPTION = "client_max_retries";
private static final String CLIENT_RETRY_INTERVAL_OPTION
= "client_retry_interval_ms";
public static final String HELP_CMD = "help";
private static final String SIZE_LIMIT_OPTION = "size_limit_mb";
private PrintStream outStream = System.out;
private YarnClient yarnClient = null;
private Client webServiceClient = null;
private static final int DEFAULT_MAX_RETRIES = 30;
private static final long DEFAULT_RETRY_INTERVAL = 1000;
private static final long LOG_SIZE_LIMIT_DEFAULT = 10240L;
private long logSizeLeft = LOG_SIZE_LIMIT_DEFAULT * 1024 * 1024;
private long specifedLogLimits = LOG_SIZE_LIMIT_DEFAULT;
// We define the timeout to set the
// read and connect times for the URL connection.
private static final int TIME_OUT = 5000;
@Override
public int run(String[] args) throws Exception {
try {
webServiceClient = WebServiceClient.getWebServiceClient().createClient();
return runCommand(args);
} finally {
if (yarnClient != null) {
yarnClient.close();
}
if (webServiceClient != null) {
webServiceClient.close();
}
}
}
private int runCommand(String[] args) throws Exception {
Options opts = createCommandOpts();
Options printOpts = createPrintOpts(opts);
if (args.length < 1) {
printHelpMessage(printOpts);
return -1;
}
if (args[0].equals("-help")) {
printHelpMessage(printOpts);
return 0;
}
CommandLineParser parser = new GnuParser();
String appIdStr = null;
String appAttemptIdStr = null;
String clusterIdStr = null;
String containerIdStr = null;
String nodeAddress = null;
String appOwner = null;
boolean getAMContainerLogs = false;
boolean nodesList = false;
boolean showApplicationLogInfo = false;
boolean showContainerLogInfo = false;
boolean useRegex = false;
String[] logFiles = null;
String[] logFilesRegex = null;
List<String> amContainersList = new ArrayList<String>();
String localDir = null;
long bytes = Long.MAX_VALUE;
boolean ignoreSizeLimit = false;
int maxRetries = DEFAULT_MAX_RETRIES;
long retryInterval = DEFAULT_RETRY_INTERVAL;
try {
CommandLine commandLine = parser.parse(opts, args, false);
appIdStr = commandLine.getOptionValue(APPLICATION_ID_OPTION);
appAttemptIdStr = commandLine.getOptionValue(
APPLICATION_ATTEMPT_ID_OPTION);
containerIdStr = commandLine.getOptionValue(CONTAINER_ID_OPTION);
nodeAddress = commandLine.getOptionValue(NODE_ADDRESS_OPTION);
appOwner = commandLine.getOptionValue(APP_OWNER_OPTION);
getAMContainerLogs = commandLine.hasOption(AM_CONTAINER_OPTION);
nodesList = commandLine.hasOption(LIST_NODES_OPTION);
localDir = commandLine.getOptionValue(OUT_OPTION);
showApplicationLogInfo = commandLine.hasOption(
SHOW_APPLICATION_LOG_INFO);
showContainerLogInfo = commandLine.hasOption(SHOW_CONTAINER_LOG_INFO);
if (getAMContainerLogs) {
try {
amContainersList = parseAMContainer(commandLine, printOpts);
} catch (NumberFormatException ex) {
System.err.println(ex.getMessage());
return -1;
}
}
if (commandLine.hasOption(CLUSTER_ID_OPTION)) {
clusterIdStr = commandLine.getOptionValue(CLUSTER_ID_OPTION);
getConf().set(YarnConfiguration.RM_CLUSTER_ID, clusterIdStr);
}
if (commandLine.hasOption(PER_CONTAINER_LOG_FILES_OPTION)) {
logFiles = commandLine.getOptionValues(PER_CONTAINER_LOG_FILES_OPTION);
} else {
// For backward compatibility, we need to check for the old form of this
// command line option as well. New form takes precedent.
if (commandLine.hasOption(PER_CONTAINER_LOG_FILES_OLD_OPTION)) {
logFiles = commandLine.getOptionValues(PER_CONTAINER_LOG_FILES_OLD_OPTION);
}
}
if (commandLine.hasOption(PER_CONTAINER_LOG_FILES_REGEX_OPTION)) {
logFilesRegex = commandLine.getOptionValues(
PER_CONTAINER_LOG_FILES_REGEX_OPTION);
useRegex = true;
}
if (commandLine.hasOption(SIZE_OPTION)) {
bytes = Long.parseLong(commandLine.getOptionValue(SIZE_OPTION));
}
if (commandLine.hasOption(CLIENT_MAX_RETRY_OPTION)) {
maxRetries = Integer.parseInt(commandLine.getOptionValue(
CLIENT_MAX_RETRY_OPTION));
}
if (commandLine.hasOption(CLIENT_RETRY_INTERVAL_OPTION)) {
retryInterval = Long.parseLong(commandLine.getOptionValue(
CLIENT_RETRY_INTERVAL_OPTION));
}
if (commandLine.hasOption(SIZE_LIMIT_OPTION)) {
specifedLogLimits = Long.parseLong(commandLine.getOptionValue(
SIZE_LIMIT_OPTION));
logSizeLeft = specifedLogLimits * 1024 * 1024;
}
if (logSizeLeft < 0L) {
ignoreSizeLimit = true;
}
} catch (ParseException e) {
System.err.println("options parsing failed: " + e.getMessage());
printHelpMessage(printOpts);
return -1;
}
if (appIdStr == null && appAttemptIdStr == null && containerIdStr == null) {
System.err.println("None of applicationId, appAttemptId and containerId "
+ "is available, one of them must be specified.");
printHelpMessage(printOpts);
return -1;
}
ApplicationId appId = null;
if (appIdStr != null) {
try {
appId = ApplicationId.fromString(appIdStr);
} catch (Exception e) {
System.err.println("Invalid ApplicationId specified");
return -1;
}
}
ApplicationAttemptId appAttemptId = null;
if (appAttemptIdStr != null) {
try {
appAttemptId = ApplicationAttemptId.fromString(appAttemptIdStr);
if (appId == null) {
appId = appAttemptId.getApplicationId();
} else if (!appId.equals(appAttemptId.getApplicationId())) {
System.err.println("The Application:" + appId
+ " does not have the AppAttempt:" + appAttemptId);
return -1;
}
} catch (Exception e) {
System.err.println("Invalid AppAttemptId specified");
return -1;
}
}
if (containerIdStr != null) {
try {
ContainerId containerId = ContainerId.fromString(containerIdStr);
if (appAttemptId != null && !appAttemptId.equals(
containerId.getApplicationAttemptId())) {
System.err.println("The AppAttempt:" + appAttemptId
+ " does not have the container:" + containerId);
return -1;
}
if (appId == null) {
appId = containerId.getApplicationAttemptId().getApplicationId();
} else if (!containerId.getApplicationAttemptId().getApplicationId()
.equals(appId)) {
System.err.println("The Application:" + appId
+ " does not have the container:" + containerId);
return -1;
}
} catch (Exception e) {
System.err.println("Invalid ContainerId specified");
return -1;
}
}
if (showApplicationLogInfo && showContainerLogInfo) {
System.err.println("Invalid options. Can only accept one of "
+ "show_application_log_info/show_container_log_info.");
return -1;
}
if (logFiles != null && logFiles.length > 0 && logFilesRegex != null
&& logFilesRegex.length > 0) {
System.err.println("Invalid options. Can only accept one of "
+ "log_files/log_files_pattern.");
return -1;
}
if (localDir != null) {
File file = new File(localDir);
if (file.exists() && file.isFile()) {
System.err.println("Invalid value for -out option. "
+ "Please provide a directory.");
return -1;
}
}
// Set up Retry WebService Client
ClientJerseyRetryFilter retryFilter = new ClientJerseyRetryFilter(maxRetries, retryInterval);
webServiceClient.register(retryFilter);
LogCLIHelpers logCliHelper = new LogCLIHelpers();
logCliHelper.setConf(getConf());
yarnClient = createYarnClient();
YarnApplicationState appState = YarnApplicationState.NEW;
ApplicationReport appReport = null;
try {
appReport = getApplicationReport(appId);
appState = appReport.getYarnApplicationState();
if (appState == YarnApplicationState.NEW
|| appState == YarnApplicationState.NEW_SAVING
|| appState == YarnApplicationState.SUBMITTED) {
System.err.println("Logs are not available right now.");
return -1;
}
} catch (IOException | YarnException e) {
// If we can not get appReport from either RM or ATS
// We will assume that this app has already finished.
appState = YarnApplicationState.FINISHED;
System.err.println("Unable to get ApplicationState."
+ " Attempting to fetch logs directly from the filesystem.");
}
if (appOwner == null || appOwner.isEmpty()) {
appOwner = guessAppOwner(appReport, appId);
if (appOwner == null) {
System.err.println("Can not find the appOwner. "
+ "Please specify the correct appOwner");
System.err.println("Could not locate application logs for " + appId);
return -1;
}
}
Set<String> logs = new HashSet<String>();
if (fetchAllLogFiles(logFiles, logFilesRegex)) {
logs.add("ALL");
} else if (logFiles != null && logFiles.length > 0) {
logs.addAll(Arrays.asList(logFiles));
} else if (logFilesRegex != null && logFilesRegex.length > 0) {
logs.addAll(Arrays.asList(logFilesRegex));
}
ContainerLogsRequest request = new ContainerLogsRequest(appId, appAttemptId,
Apps.isApplicationFinalState(appState), appOwner, nodeAddress,
null, containerIdStr, localDir, logs, bytes, null);
if (showContainerLogInfo) {
return showContainerLogInfo(request, logCliHelper);
}
if (nodesList) {
return showNodeLists(request, logCliHelper);
}
if (showApplicationLogInfo) {
return showApplicationLogInfo(request, logCliHelper);
}
// To get am logs
if (getAMContainerLogs) {
return fetchAMContainerLogs(request, amContainersList,
logCliHelper, useRegex, ignoreSizeLimit);
}
int resultCode = 0;
if (containerIdStr != null) {
return fetchContainerLogs(request, logCliHelper, useRegex,
ignoreSizeLimit);
} else {
if (nodeAddress == null) {
resultCode = fetchApplicationLogs(request, logCliHelper, useRegex,
ignoreSizeLimit);
} else {
System.err.println("Should at least provide ContainerId!");
printHelpMessage(printOpts);
resultCode = -1;
}
}
return resultCode;
}
  /**
   * Looks up the {@link ApplicationReport} for the given application through
   * this CLI's {@code yarnClient}.
   *
   * @param appId the application to look up
   * @return the report for {@code appId}
   * @throws IOException if the client call fails at the transport level
   * @throws YarnException if YARN rejects the request (e.g. unknown app)
   */
  private ApplicationReport getApplicationReport(ApplicationId appId)
      throws IOException, YarnException {
    return yarnClient.getApplicationReport(appId);
  }
@VisibleForTesting
protected YarnClient createYarnClient() {
YarnClient client = YarnClient.createYarnClient();
client.init(getConf());
client.start();
return client;
}
public static void main(String[] args) throws Exception {
Configuration conf = new YarnConfiguration();
LogsCLI logDumper = new LogsCLI();
logDumper.setConf(conf);
WebServiceClient.initialize(conf);
int exitCode = logDumper.run(args);
WebServiceClient.destroy();
System.exit(exitCode);
}
private void printHelpMessage(Options options) {
outStream.println("Retrieve logs for YARN applications.");
HelpFormatter formatter = new HelpFormatter();
formatter.printHelp("yarn logs -applicationId <application ID> [OPTIONS]",
new Options());
formatter.setSyntaxPrefix("");
formatter.printHelp("general options are:", options);
}
  /**
   * Fetches the app-attempt JSON objects from the active ResourceManager's
   * {@code ws/v1/cluster/apps/<appId>/appattempts} endpoint, delegating
   * RM-failover handling to {@link WebAppUtils#execOnActiveRM}.
   */
  protected List<JSONObject> getAMContainerInfoForRMWebService(
      Configuration conf, String appId) throws Exception {
    return WebAppUtils.execOnActiveRM(conf, this::getAMContainerInfoFromRM,
        appId);
  }
private List<JSONObject> getAMContainerInfoFromRM(
String webAppAddress, String appId) throws ProcessingException,
IllegalStateException, JSONException {
List<JSONObject> amContainersList = new ArrayList<JSONObject>();
final WebTarget target = webServiceClient.target(webAppAddress)
.path("ws").path("v1").path("cluster")
.path("apps").path(appId).path("appattempts");
try (Response response = target.request(MediaType.APPLICATION_JSON)
.get(Response.class)) {
String entity = response.readEntity(String.class);
JSONObject json = new JSONObject(entity)
.getJSONObject("appAttempts");
JSONArray requests = json.getJSONArray("appAttempt");
for (int j = 0; j < requests.length(); j++) {
amContainersList.add(requests.getJSONObject(j));
}
return amContainersList;
}
}
private List<JSONObject> getAMContainerInfoForAHSWebService(
Configuration conf, String appId) throws ProcessingException,
IllegalStateException, JSONException {
String webAppAddress =
WebAppUtils.getHttpSchemePrefix(conf)
+ WebAppUtils.getAHSWebAppURLWithoutScheme(conf);
final WebTarget target = webServiceClient.target(webAppAddress);
Response response =
target.path("ws").path("v1").path("applicationhistory")
.path("apps").path(appId).path("appattempts")
.request(MediaType.APPLICATION_JSON)
.get(Response.class);
String entity = response.readEntity(String.class);
JSONObject json = new JSONObject(entity);
JSONArray requests = json.getJSONArray("appAttempt");
List<JSONObject> amContainersList = new ArrayList<JSONObject>();
for (int i = 0; i < requests.length(); i++) {
amContainersList.add(requests.getJSONObject(i));
}
Collections.reverse(amContainersList);
return amContainersList;
}
private boolean fetchAllLogFiles(String[] logFiles, String[] logFilesRegex) {
// If no value is specified for the PER_CONTAINER_LOG_FILES_OPTION option
// and PER_CONTAINER_LOG_FILES_REGEX_OPTION
// we will assume all logs.
if ((logFiles == null || logFiles.length == 0) && (
logFilesRegex == null || logFilesRegex.length == 0)) {
return true;
}
if (logFiles != null && logFiles.length > 0) {
List<String> logs = Arrays.asList(logFiles);
if (logs.contains("ALL") || logs.contains("*")) {
return true;
}
}
if (logFilesRegex != null && logFilesRegex.length > 0) {
List<String> logsRegex = Arrays.asList(logFilesRegex);
if (logsRegex.contains(".*")) {
return true;
}
}
return false;
}
  /**
   * Asks a NodeManager's web service ({@code ws/v1/node/containers/<id>/logs})
   * which log files it knows about for the given container.
   *
   * @param conf configuration used to choose the http/https scheme prefix
   * @param containerIdStr container whose log files are listed
   * @param nodeHttpAddress the NM web address (without scheme)
   * @return pairs of (log-file metadata, log-aggregation-type string); empty
   *         when the NM returns a non-OK status or no log info
   * @throws IOException on transport failures or unparsable JSON
   */
  private List<Pair<ContainerLogFileInfo, String>> getContainerLogFiles(
      Configuration conf, String containerIdStr, String nodeHttpAddress)
      throws IOException {
    List<Pair<ContainerLogFileInfo, String>> logFileInfos
        = new ArrayList<>();
    try {
      WebTarget target = webServiceClient
          .target(WebAppUtils.getHttpSchemePrefix(conf) + nodeHttpAddress);
      Response response =
          target.path("ws").path("v1").path("node").path("containers")
              .path(containerIdStr).path("logs")
              .request(MediaType.APPLICATION_JSON)
              .get(Response.class);
      if (response.getStatusInfo().getStatusCode() ==
          Response.Status.OK.getStatusCode()) {
        try {
          JSONArray array = new JSONArray();
          String entity = response.readEntity(String.class);
          JSONObject json = new JSONObject(entity);
          // Some NM versions wrap the payload in "containerLogsInfoes".
          if (json.has("containerLogsInfoes")) {
            json = json.getJSONObject("containerLogsInfoes");
          }
          if (!json.has("containerLogsInfo")) {
            return logFileInfos;
          }
          // "containerLogsInfo" is serialized as a single object when there
          // is one entry and as an array otherwise; normalize into `array`.
          Object logsInfoObj = json.get("containerLogsInfo");
          if (logsInfoObj instanceof JSONObject) {
            array.put(logsInfoObj);
          } else if (logsInfoObj instanceof JSONArray) {
            JSONArray logsArray = (JSONArray)logsInfoObj;
            for (int i=0; i < logsArray.length(); i++) {
              array.put(logsArray.getJSONObject(i));
            }
          }
          for (int i = 0; i < array.length(); i++) {
            JSONObject log = array.getJSONObject(i);
            String aggregateType = log.has("logAggregationType") ?
                log.getString("logAggregationType") : "N/A";
            // Entries without per-file metadata are skipped entirely.
            if (!log.has("containerLogInfo")) {
              continue;
            }
            // Same single-object-vs-array normalization for the per-file
            // metadata under "containerLogInfo".
            Object ob = log.get("containerLogInfo");
            if (ob instanceof JSONArray) {
              JSONArray obArray = (JSONArray)ob;
              for (int j = 0; j < obArray.length(); j++) {
                logFileInfos.add(new Pair<ContainerLogFileInfo, String>(
                    generatePerContainerLogFileInfoFromJSON(
                        obArray.getJSONObject(j)), aggregateType));
              }
            } else if (ob instanceof JSONObject) {
              logFileInfos.add(new Pair<ContainerLogFileInfo, String>(
                  generatePerContainerLogFileInfoFromJSON(
                      (JSONObject)ob), aggregateType));
            }
          }
        } catch (Exception e) {
          System.err.println("Unable to parse json from webservice. Error:");
          System.err.println(e.getMessage());
          throw new IOException(e);
        }
      }
    } catch (ProcessingException | IllegalStateException ex) {
      System.err.println("Unable to fetch log files list");
      throw new IOException(ex);
    }
    return logFileInfos;
  }
private ContainerLogFileInfo generatePerContainerLogFileInfoFromJSON(
JSONObject meta) throws JSONException {
String fileName = meta.has("fileName") ?
meta.getString("fileName") : "N/A";
String fileSize = meta.has("fileSize") ?
meta.getString("fileSize") : "N/A";
String lastModificationTime = meta.has("lastModifiedTime") ?
meta.getString("lastModifiedTime") : "N/A";
return new ContainerLogFileInfo(fileName, fileSize,
lastModificationTime);
}
  /**
   * Streams the requested log files of one container directly from its
   * NodeManager's web service into a per-container {@link PrintStream}
   * (stdout or a local file, depending on the request's output dir).
   *
   * @return 0 if at least one log file's fetch attempt completed without a
   *         web-service exception, -1 otherwise
   */
  @Private
  @VisibleForTesting
  public int printContainerLogsFromRunningApplication(Configuration conf,
      ContainerLogsRequest request, LogCLIHelpers logCliHelper,
      boolean useRegex, boolean ignoreSizeLimit) throws IOException {
    String containerIdStr = request.getContainerId().toString();
    String localDir = request.getOutputLocalDir();
    String nodeId = request.getNodeId();
    PrintStream out = LogToolUtils.createPrintStream(localDir, nodeId,
        containerIdStr);
    try {
      boolean foundAnyLogs = false;
      // 64K copy buffer, reused across files.
      byte[] buffer = new byte[65536];
      for (String logFile : request.getLogTypes()) {
        InputStream is = null;
        try {
          Response response = getResponseFromNMWebService(conf,
              webServiceClient, request, logFile);
          if (response != null && response.getStatusInfo().getStatusCode() ==
              Response.Status.OK.getStatusCode()) {
            // Copy the log body through to the output stream.
            is = response.readEntity(InputStream.class);
            int len = 0;
            while((len = is.read(buffer)) != -1) {
              out.write(buffer, 0, len);
            }
            out.println();
          } else {
            // Non-OK (or null) response: record a diagnostic in the output
            // instead of the log contents.
            out.println("Can not get any logs for the log file: " + logFile);
            String msg = "Response from the NodeManager:" + nodeId +
                " WebService is " + ((response == null) ? "null":
                "not successful," + " HTTP error code: " +
                response.getStatus() + ", Server response:\n" +
                response.readEntity(String.class));
            out.println(msg);
          }
          out.flush();
          // NOTE(review): this is set even on the non-OK branch above, so the
          // method reports success whenever the web call did not throw —
          // confirm that is the intended contract.
          foundAnyLogs = true;
        } catch (ProcessingException | IllegalStateException ex) {
          System.err.println("Can not find the log file:" + logFile
              + " for the container:" + containerIdStr + " in NodeManager:"
              + nodeId);
        } finally {
          IOUtils.closeStream(is);
        }
      }
      if (foundAnyLogs) {
        return 0;
      } else {
        return -1;
      }
    } finally {
      // Only closes the stream when it targets a local file, per the helper.
      logCliHelper.closePrintStream(out);
    }
  }
  /**
   * Looks up the {@link ContainerReport} for the given container id string
   * via the YARN client.
   *
   * @param containerIdStr textual container id, parsed via
   *        {@link ContainerId#fromString}
   * @throws YarnException if YARN rejects the request (e.g. unknown id)
   * @throws IOException on transport failure
   */
  @Private
  @VisibleForTesting
  public ContainerReport getContainerReport(String containerIdStr)
      throws YarnException, IOException {
    return yarnClient.getContainerReport(
        ContainerId.fromString(containerIdStr));
  }
  /**
   * Fetches and prints the logs of the application's AM container(s).
   *
   * AM container ids (and, where available, node addresses) are discovered
   * from the RM web service; for finished apps, the timeline reader and the
   * AHS are used as fallbacks. {@code amContainers} selects which attempts
   * to dump: the literal "ALL", -1 for the most recent entry, or 1-based
   * indices into the discovered list.
   *
   * @return 0 on success; -1 when AM container info cannot be resolved or a
   *         requested index is out of range
   */
  private int printAMContainerLogs(Configuration conf,
      ContainerLogsRequest request, List<String> amContainers,
      LogCLIHelpers logCliHelper, boolean useRegex, boolean ignoreSizeLimit)
      throws Exception {
    List<JSONObject> amContainersList = null;
    List<ContainerLogsRequest> requests =
        new ArrayList<ContainerLogsRequest>();
    boolean getAMContainerLists = false;
    String appId = request.getAppId().toString();
    StringBuilder errorMessage = new StringBuilder();
    // We will call RM webservice to get all AppAttempts information.
    // If we get nothing, we will try to call AHS webservice to get AppAttempts
    // which includes nodeAddress for the AM Containers.
    try {
      amContainersList = getAMContainerInfoForRMWebService(conf, appId);
      if (amContainersList != null && !amContainersList.isEmpty()) {
        getAMContainerLists = true;
        // One log request per attempt, seeded from the base request.
        for (JSONObject amContainer : amContainersList) {
          ContainerLogsRequest amRequest = new ContainerLogsRequest(request);
          amRequest.setContainerId(amContainer.getString("containerId"));
          String httpAddress = amContainer.getString("nodeHttpAddress");
          if (httpAddress != null && !httpAddress.isEmpty()) {
            amRequest.setNodeHttpAddress(httpAddress);
          }
          amRequest.setNodeId(amContainer.getString("nodeId"));
          requests.add(amRequest);
        }
      }
    } catch (Exception ex) {
      // RM path failed; fall back to history services, but only for
      // finished applications.
      errorMessage.append(ex.getMessage() + "\n");
      if (request.isAppFinished()) {
        if (!conf.getBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED,
            YarnConfiguration.DEFAULT_TIMELINE_SERVICE_ENABLED)) {
          errorMessage.append("Please enable the timeline service "
              + "and make sure the timeline server is running.");
        } else {
          try {
            if (YarnConfiguration.timelineServiceV2Enabled(conf)) {
              try {
                // Prefer ATS v2 (timeline reader)...
                amContainersList =
                    getAMContainerInfoFromTimelineReader(conf, appId);
                getAMContainerLists =
                    createContainerLogsRequestForMasterContainer(requests,
                        request, amContainersList,
                        AppAttemptMetricsConstants.MASTER_CONTAINER_INFO);
              } catch (Exception e) {
                // ...and drop back to ATS v1/v1.5 (AHS) if that also fails
                // and an older timeline service is configured.
                System.err.println(
                    "Unable to get AM container informations from "
                        + "TimelineReader for the application:" + appId);
                if (YarnConfiguration.timelineServiceV1Enabled(conf)
                    || YarnConfiguration.timelineServiceV15Enabled(conf)) {
                  getAMContainerLists =
                      getAMContainerInfoForAHSWebService(conf, appId, requests,
                          request);
                } else {
                  throw e;
                }
              }
            } else {
              getAMContainerLists =
                  getAMContainerInfoForAHSWebService(conf, appId, requests,
                      request);
            }
          } catch (Exception e) {
            errorMessage.append(e.getMessage());
          }
        }
      }
    }
    if (!getAMContainerLists) {
      System.err.println("Unable to get AM container informations "
          + "for the application:" + appId);
      System.err.println(errorMessage);
      System.err.println("Can not get AMContainers logs for "
          + "the application:" + appId + " with the appOwner:"
          + request.getAppOwner());
      return -1;
    }
    // Translate the user's -am selection into concrete request candidates.
    List<ContainerLogsRequest> candidates = new ArrayList<>();
    if (amContainers.contains("ALL")) {
      candidates.addAll(requests);
      outStream.println();
      outStream.println("Specified ALL for -am option. "
          + "Printed logs for all am containers.");
    } else {
      for (String amContainer : amContainers) {
        int amContainerId = Integer.parseInt(amContainer.trim());
        if (amContainerId == -1) {
          // -1 means the last discovered entry.
          candidates.add(requests.get(requests.size() - 1));
        } else {
          if (amContainerId <= requests.size()) {
            candidates.add(requests.get(amContainerId - 1));
          } else {
            System.err.println(String.format("ERROR: Specified AM containerId"
                + " (%s) exceeds the number of AM containers (%s).",
                amContainerId, requests.size()));
            return -1;
          }
        }
      }
    }
    // Narrow each candidate's log types to files that actually exist, then
    // dump them one by one.
    Map<String, ContainerLogsRequest> newOptions = new HashMap<>();
    if (request.isAppFinished()) {
      newOptions = getMatchedLogTypesForFinishedApp(candidates,
          logCliHelper, useRegex, ignoreSizeLimit);
    } else {
      newOptions = getMatchedLogTypesForRunningApp(candidates, useRegex,
          ignoreSizeLimit);
    }
    for (Entry<String, ContainerLogsRequest> amRequest
        : newOptions.entrySet()) {
      outputAMContainerLogs(amRequest.getValue(), conf, logCliHelper,
          useRegex, ignoreSizeLimit);
    }
    return 0;
  }
private boolean getAMContainerInfoForAHSWebService(Configuration conf,
String appId, List<ContainerLogsRequest> requests,
ContainerLogsRequest request) throws JSONException {
List<JSONObject> amContainersList =
getAMContainerInfoForAHSWebService(conf, appId);
return createContainerLogsRequestForMasterContainer(requests, request,
amContainersList, "amContainerId");
}
private boolean createContainerLogsRequestForMasterContainer(
List<ContainerLogsRequest> requests, ContainerLogsRequest request,
List<JSONObject> amContainersList, String masterContainerInfo)
throws JSONException {
boolean getAMContainerLists = false;
if (amContainersList != null && !amContainersList.isEmpty()) {
getAMContainerLists = true;
for (JSONObject amContainer : amContainersList) {
ContainerLogsRequest amRequest = new ContainerLogsRequest(request);
amRequest.setContainerId(amContainer.getString(masterContainerInfo));
requests.add(amRequest);
}
}
return getAMContainerLists;
}
private List<JSONObject> getAMContainerInfoFromTimelineReader(
Configuration conf, String appId)
throws IOException, ProcessingException, IllegalStateException,
JSONException {
final Response response = getClientResponseFromTimelineReader(conf, appId);
String entity = response.readEntity(String.class);
JSONArray appAttemptEntities = new JSONArray(entity);
List<JSONObject> amContainersList = new ArrayList<JSONObject>();
for (int i = 0; i < appAttemptEntities.length(); i++) {
JSONObject appAttemptEntity = appAttemptEntities.getJSONObject(i);
JSONObject infoField = appAttemptEntity.getJSONObject("info");
amContainersList.add(infoField);
}
Collections.reverse(amContainersList);
return amContainersList;
}
  /**
   * Queries the timeline reader for the application's
   * YARN_APPLICATION_ATTEMPT entities (INFO fields only) and returns the raw
   * JAX-RS response.
   *
   * The response is returned OPEN; the caller is responsible for reading and
   * releasing it.
   *
   * @throws IOException when the response is null or its status is not 200 OK
   *         (the server's body, if any, is included in the message)
   */
  protected Response getClientResponseFromTimelineReader(
      Configuration conf, String appId) throws IOException {
    String webAppAddress = WebAppUtils.getHttpSchemePrefix(conf) + WebAppUtils
        .getTimelineReaderWebAppURLWithoutScheme(conf);
    final WebTarget target = webServiceClient.target(webAppAddress);
    final Response response =
        target.path("ws").path("v2").path("timeline").path("clusters")
            .path(conf.get(YarnConfiguration.RM_CLUSTER_ID)).path("apps")
            .path(appId).path("entities")
            .path(TimelineEntityType.YARN_APPLICATION_ATTEMPT.toString())
            .queryParam("fields", "INFO").request(MediaType.APPLICATION_JSON)
            .get(Response.class);
    if (response == null
        || response.getStatusInfo().getStatusCode() != Response.Status.OK
        .getStatusCode()) {
      String msg =
          "Response from the timeline reader server is " + ((response == null) ?
              "null" :
              "not successful," + " HTTP error code: " + response.getStatus()
                  + ", Server response:\n" + response.readEntity(String.class));
      System.out.println(msg);
      throw new IOException(msg);
    }
    return response;
  }
private void outputAMContainerLogs(ContainerLogsRequest request,
Configuration conf, LogCLIHelpers logCliHelper, boolean useRegex,
boolean ignoreSizeLimit) throws Exception {
String nodeHttpAddress = request.getNodeHttpAddress();
String containerId = request.getContainerId();
String nodeId = request.getNodeId();
if (request.isAppFinished()) {
if (containerId != null && !containerId.isEmpty()) {
if (nodeId != null && !nodeId.isEmpty()) {
logCliHelper.dumpAContainerLogsForLogType(request);
} else {
logCliHelper.dumpAContainerLogsForLogTypeWithoutNodeId(
request);
}
}
} else {
if (nodeHttpAddress != null && containerId != null
&& !nodeHttpAddress.isEmpty() && !containerId.isEmpty()) {
ContainerState containerState = getContainerReport(containerId)
.getContainerState();
request.setContainerState(containerState);
printContainerLogsFromRunningApplication(conf,
request, logCliHelper, useRegex, ignoreSizeLimit);
}
}
}
private int showContainerLogInfo(ContainerLogsRequest request,
LogCLIHelpers logCliHelper) throws IOException, YarnException,
ProcessingException, IllegalStateException, JSONException {
if (!request.isAppFinished()) {
return printContainerInfoFromRunningApplication(request, logCliHelper);
} else {
return logCliHelper.printAContainerLogMetadata(
request, System.out, System.err);
}
}
private int showNodeLists(ContainerLogsRequest request,
LogCLIHelpers logCliHelper) throws IOException {
if (!request.isAppFinished()) {
System.err.println("The -list_nodes command can be only used with "
+ "finished applications");
return -1;
} else {
logCliHelper.printNodesList(request, System.out, System.err);
return 0;
}
}
private int showApplicationLogInfo(ContainerLogsRequest request,
LogCLIHelpers logCliHelper) throws IOException, YarnException {
String appState = "Application State: "
+ (request.isAppFinished() ? "Completed." : "Running.");
if (!request.isAppFinished()) {
List<ContainerReport> reports =
getContainerReportsFromRunningApplication(request);
List<ContainerReport> filterReports = filterContainersInfo(
request, reports);
if (filterReports.isEmpty()) {
System.err.println("Can not find any containers for the application:"
+ request.getAppId() + ".");
return -1;
}
outStream.println(appState);
for (ContainerReport report : filterReports) {
outStream.println(String.format(LogCLIHelpers.CONTAINER_ON_NODE_PATTERN,
report.getContainerId(), report.getAssignedNode()));
}
return 0;
} else {
outStream.println(appState);
logCliHelper.printContainersList(request, System.out, System.err);
return 0;
}
}
  /**
   * Create Command Options.
   *
   * Builds the full commons-cli option set recognized by {@code yarn logs}:
   * target selection (application / attempt / container / node), log-file
   * selection (exact names or regexes), output control, and retry / size
   * limits.
   *
   * @return the command options
   */
  private Options createCommandOpts() {
    Options opts = new Options();
    opts.addOption(HELP_CMD, false, "Displays help for all commands.");
    // --- Target selection ---
    Option appIdOpt =
        new Option(APPLICATION_ID_OPTION, true, "ApplicationId (required)");
    opts.addOption(appIdOpt);
    opts.addOption(APPLICATION_ATTEMPT_ID_OPTION, true, "ApplicationAttemptId. "
        + "Lists all logs belonging to the specified application attempt Id. "
        + "If specified, the applicationId can be omitted");
    opts.addOption(CONTAINER_ID_OPTION, true, "ContainerId. "
        + "By default, it will print all available logs."
        + " Work with -log_files to get only specific logs. If specified, the"
        + " applicationId can be omitted");
    opts.addOption(CLUSTER_ID_OPTION, true, "ClusterId. "
        + "By default, it will take default cluster id from the RM");
    opts.addOption(NODE_ADDRESS_OPTION, true, "NodeAddress in the format "
        + "nodename:port");
    opts.addOption(APP_OWNER_OPTION, true,
        "AppOwner (assumed to be current user if not specified)");
    // -am takes a comma-separated list of attempt indices, or ALL / -1.
    Option amOption = new Option(AM_CONTAINER_OPTION, true,
        "Prints the AM Container logs for this application. "
        + "Specify comma-separated value to get logs for related AM "
        + "Container. For example, If we specify -am 1,2, we will get "
        + "the logs for the first AM Container as well as the second "
        + "AM Container. To get logs for all AM Containers, use -am ALL. "
        + "To get logs for the latest AM Container, use -am -1. "
        + "By default, it will print all available logs. Work with -log_files "
        + "to get only specific logs.");
    amOption.setValueSeparator(',');
    amOption.setArgs(Option.UNLIMITED_VALUES);
    amOption.setArgName("AM Containers");
    opts.addOption(amOption);
    // --- Log-file selection: exact names, a deprecated alias, and regexes ---
    Option logFileOpt = new Option(PER_CONTAINER_LOG_FILES_OPTION, true,
        "Specify comma-separated value "
        + "to get exact matched log files. Use \"ALL\" or \"*\" to "
        + "fetch all the log files for the container.");
    logFileOpt.setValueSeparator(',');
    logFileOpt.setArgs(Option.UNLIMITED_VALUES);
    logFileOpt.setArgName("Log File Name");
    opts.addOption(logFileOpt);
    Option oldLogFileOpt = new Option(PER_CONTAINER_LOG_FILES_OLD_OPTION, true,
        "Deprecated name for log_files, please use log_files option instead");
    oldLogFileOpt.setValueSeparator(',');
    oldLogFileOpt.setArgs(Option.UNLIMITED_VALUES);
    oldLogFileOpt.setArgName("Log File Name");
    opts.addOption(oldLogFileOpt);
    Option logFileRegexOpt = new Option(PER_CONTAINER_LOG_FILES_REGEX_OPTION,
        true, "Specify comma-separated value "
        + "to get matched log files by using java regex. Use \".*\" to "
        + "fetch all the log files for the container.");
    logFileRegexOpt.setValueSeparator(',');
    logFileRegexOpt.setArgs(Option.UNLIMITED_VALUES);
    logFileRegexOpt.setArgName("Log File Pattern");
    opts.addOption(logFileRegexOpt);
    // --- Metadata-only modes ---
    opts.addOption(SHOW_CONTAINER_LOG_INFO, false,
        "Show the container log metadata, "
        + "including log-file names, the size of the log files. "
        + "You can combine this with --containerId to get log metadata for "
        + "the specific container, or with --nodeAddress to get log metadata "
        + "for all the containers on the specific NodeManager.")
</source>
/**
* Create Print options for helper message.
* @param commandOpts the options
* @return the print options
*/
private Options createPrintOpts(Options commandOpts) {
Options printOpts = new Options();
printOpts.addOption(commandOpts.getOption(HELP_CMD));
printOpts.addOption(commandOpts.getOption(CONTAINER_ID_OPTION));
printOpts.addOption(commandOpts.getOption(CLUSTER_ID_OPTION));
printOpts.addOption(commandOpts.getOption(NODE_ADDRESS_OPTION));
printOpts.addOption(commandOpts.getOption(APP_OWNER_OPTION));
printOpts.addOption(commandOpts.getOption(AM_CONTAINER_OPTION));
printOpts.addOption(commandOpts.getOption(PER_CONTAINER_LOG_FILES_OPTION));
printOpts.addOption(commandOpts.getOption(LIST_NODES_OPTION));
printOpts.addOption(commandOpts.getOption(SHOW_APPLICATION_LOG_INFO));
printOpts.addOption(commandOpts.getOption(SHOW_CONTAINER_LOG_INFO));
printOpts.addOption(commandOpts.getOption(OUT_OPTION));
printOpts.addOption(commandOpts.getOption(SIZE_OPTION));
printOpts.addOption(commandOpts.getOption(
PER_CONTAINER_LOG_FILES_REGEX_OPTION));
printOpts.addOption(commandOpts.getOption(CLIENT_MAX_RETRY_OPTION));
printOpts.addOption(commandOpts.getOption(CLIENT_RETRY_INTERVAL_OPTION));
printOpts.addOption(commandOpts.getOption(SIZE_LIMIT_OPTION));
return printOpts;
}
private List<String> parseAMContainer(CommandLine commandLine,
Options printOpts) throws NumberFormatException {
List<String> amContainersList = new ArrayList<String>();
String[] amContainers = commandLine.getOptionValues(AM_CONTAINER_OPTION);
for (String am : amContainers) {
boolean errorInput = false;
if (!am.trim().equalsIgnoreCase("ALL")) {
try {
int id = Integer.parseInt(am.trim());
if (id != -1 && id <= 0) {
errorInput = true;
}
} catch (NumberFormatException ex) {
errorInput = true;
}
if (errorInput) {
String errMessage =
"Invalid input for option -am. Valid inputs are 'ALL', -1 "
+ "and any other integer which is larger than 0.";
printHelpMessage(printOpts);
throw new NumberFormatException(errMessage);
}
amContainersList.add(am.trim());
} else {
amContainersList.add("ALL");
break;
}
}
return amContainersList;
}
private int fetchAMContainerLogs(ContainerLogsRequest request,
List<String> amContainersList, LogCLIHelpers logCliHelper,
boolean useRegex, boolean ignoreSizeLimit) throws Exception {
return printAMContainerLogs(getConf(), request, amContainersList,
logCliHelper, useRegex, ignoreSizeLimit);
}
private int fetchContainerLogs(ContainerLogsRequest request,
LogCLIHelpers logCliHelper, boolean useRegex, boolean ignoreSizeLimit)
throws IOException, ProcessingException, IllegalStateException,
JSONException {
String appIdStr = request.getAppId().toString();
String containerIdStr = request.getContainerId();
String nodeAddress = request.getNodeId();
String appOwner = request.getAppOwner();
boolean isAppFinished = request.isAppFinished();
// if the application is in the final state,
// we could directly get logs from HDFS.
if (isAppFinished) {
// if user specified "ALL" as the logFiles param, pass empty list
// to logCliHelper so that it fetches all the logs
ContainerLogsRequest newOptions = getMatchedLogOptions(
request, logCliHelper, useRegex, ignoreSizeLimit);
if (newOptions == null) {
System.err.println("Can not find any log file matching the pattern: "
+ request.getLogTypes() + " for the container: "
+ request.getContainerId() + " within the application: "
+ request.getAppId());
return -1;
}
if (nodeAddress != null && !nodeAddress.isEmpty()) {
return logCliHelper.dumpAContainerLogsForLogType(newOptions);
} else {
return logCliHelper.dumpAContainerLogsForLogTypeWithoutNodeId(
newOptions);
}
}
String nodeHttpAddress = null;
String nodeId = null;
try {
// If the nodeAddress is not provided, we will try to get
// the ContainerReport. In the containerReport, we could get
// nodeAddress and nodeHttpAddress
ContainerReport report = getContainerReport(containerIdStr);
nodeHttpAddress = report.getNodeHttpAddress();
if (nodeHttpAddress != null && !nodeHttpAddress.isEmpty()) {
nodeHttpAddress = nodeHttpAddress.replaceFirst(
WebAppUtils.getHttpSchemePrefix(getConf()), "");
request.setNodeHttpAddress(nodeHttpAddress);
}
nodeId = report.getAssignedNode().toString();
request.setNodeId(nodeId);
request.setContainerState(report.getContainerState());
} catch (IOException | YarnException ex) {
nodeHttpAddress = getNodeHttpAddressFromRMWebString(request);
if (nodeHttpAddress != null && !nodeHttpAddress.isEmpty()) {
request.setNodeHttpAddress(nodeHttpAddress);
} else {
// for the case, we have already uploaded partial logs in HDFS
int result = -1;
ContainerLogsRequest newOptions = getMatchedLogOptions(
request, logCliHelper, useRegex, ignoreSizeLimit);
if (newOptions == null) {
System.err.println("Can not find any log file matching the pattern: "
+ request.getLogTypes() + " for the container: "
+ request.getContainerId() + " within the application: "
+ request.getAppId());
} else {
if (nodeAddress != null && !nodeAddress.isEmpty()) {
result = logCliHelper.dumpAContainerLogsForLogType(newOptions);
} else {
result = logCliHelper.dumpAContainerLogsForLogTypeWithoutNodeId(
newOptions);
}
}
if (result == -1) {
System.err.println(
"Unable to get logs for this container:"
+ containerIdStr + " for the application:"
+ appIdStr + " with the appOwner: " + appOwner);
System.err.println("The application: " + appIdStr
+ " is still running, and we can not get Container report "
+ "for the container: " + containerIdStr + ". Please try later "
+ "or after the application finishes.");
}
return result;
}
}
// If the application is not in the final state,
// we will provide the NodeHttpAddress and get the container logs
// by calling NodeManager webservice.
ContainerLogsRequest newRequest = getMatchedOptionForRunningApp(
request, useRegex, ignoreSizeLimit);
if (newRequest == null) {
return -1;
}
return printContainerLogsFromRunningApplication(getConf(), request,
logCliHelper, useRegex, ignoreSizeLimit);
}
private int fetchApplicationLogs(ContainerLogsRequest options,
LogCLIHelpers logCliHelper, boolean useRegex, boolean ignoreSizeLimit)
throws IOException, YarnException {
// If the application has finished, we would fetch the logs
// from HDFS.
// If the application is still running, we would get the full
// list of the containers first, then fetch the logs for each
// container from NM.
int resultCode = -1;
if (options.isAppFinished()) {
ContainerLogsRequest newOptions = getMatchedLogOptions(
options, logCliHelper, useRegex, ignoreSizeLimit);
if (newOptions == null) {
System.err.println("Can not find any log file matching the pattern: "
+ options.getLogTypes() + " for the application: "
+ options.getAppId());
} else {
resultCode =
logCliHelper.dumpAllContainersLogs(newOptions);
}
} else {
List<ContainerLogsRequest> containerLogRequests =
getContainersLogRequestForRunningApplication(options);
// get all matched container log types and check the total log size.
Map<String, ContainerLogsRequest> matchedLogTypes =
getMatchedLogTypesForRunningApp(containerLogRequests,
useRegex, ignoreSizeLimit);
for (Entry<String, ContainerLogsRequest> container
: matchedLogTypes.entrySet()) {
int result = printContainerLogsFromRunningApplication(getConf(),
container.getValue(), logCliHelper,
useRegex, ignoreSizeLimit);
if (result == 0) {
resultCode = 0;
}
}
}
if (resultCode == -1) {
System.err.println("Can not find the logs for the application: "
+ options.getAppId() + " with the appOwner: "
+ options.getAppOwner());
}
return resultCode;
}
private String guessAppOwner(ApplicationReport appReport,
ApplicationId appId) throws IOException {
String appOwner = null;
if (appReport != null) {
//always use the app owner from the app report if possible
appOwner = appReport.getUser();
} else {
appOwner = UserGroupInformation.getCurrentUser().getShortUserName();
appOwner = LogCLIHelpers.getOwnerForAppIdOrNull(
appId, appOwner, getConf());
}
return appOwner;
}
private ContainerLogsRequest getMatchedLogOptions(
ContainerLogsRequest request, LogCLIHelpers logCliHelper,
boolean useRegex, boolean ignoreSizeLimit) throws IOException {
ContainerLogsRequest newOptions = new ContainerLogsRequest(request);
Set<ContainerLogFileInfo> files = logCliHelper.listContainerLogs(
request);
Set<String> matchedFiles = getMatchedLogFiles(request, files,
useRegex, ignoreSizeLimit);
if (matchedFiles.isEmpty()) {
return null;
} else {
newOptions.setLogTypes(matchedFiles);
return newOptions;
}
}
private Set<String> getMatchedLogFiles(ContainerLogsRequest options,
Collection<ContainerLogFileInfo> candidate, boolean useRegex,
boolean ignoreSizeLimit) throws IOException {
Set<String> matchedFiles = new HashSet<String>();
Set<String> filePattern = options.getLogTypes();
long size = options.getBytes();
boolean getAll = options.getLogTypes().contains("ALL");
Iterator<ContainerLogFileInfo> iterator = candidate.iterator();
while(iterator.hasNext()) {
boolean matchedFile = false;
ContainerLogFileInfo logInfo = iterator.next();
if (getAll) {
matchedFile = true;
} else if (useRegex) {
if (isFileMatching(logInfo.getFileName(), filePattern)) {
matchedFile = true;
}
} else {
if (filePattern.contains(logInfo.getFileName())) {
matchedFile = true;
}
}
if (matchedFile) {
matchedFiles.add(logInfo.getFileName());
if (!ignoreSizeLimit) {
decrLogSizeLimit(Math.min(
Long.parseLong(logInfo.getFileSize()), size));
if (getLogSizeLimitLeft() < 0) {
throw new RuntimeException("The total log size is too large."
+ "The log size limit is " + specifedLogLimits + "MB. "
+ "Please specify a proper value --size option or if you "
+ "really want to fetch all, please "
+ "specify -1 for --size_limit_mb option.");
}
}
}
}
return matchedFiles;
}
private boolean isFileMatching(String fileType,
Set<String> logTypes) {
for (String logType : logTypes) {
Pattern filterPattern = Pattern.compile(logType);
boolean match = filterPattern.matcher(fileType).find();
if (match) {
return true;
}
}
return false;
}
private List<ContainerLogsRequest>
getContainersLogRequestForRunningApplication(
ContainerLogsRequest options) throws YarnException, IOException {
List<ContainerLogsRequest> newOptionsList =
new ArrayList<ContainerLogsRequest>();
List<ContainerReport> reports =
getContainerReportsFromRunningApplication(options);
for (ContainerReport container : reports) {
ContainerLogsRequest newOptions = new ContainerLogsRequest(options);
newOptions.setContainerId(container.getContainerId().toString());
newOptions.setNodeId(container.getAssignedNode().toString());
String httpAddress = container.getNodeHttpAddress();
if (httpAddress != null && !httpAddress.isEmpty()) {
newOptions.setNodeHttpAddress(httpAddress
.replaceFirst(WebAppUtils.getHttpSchemePrefix(getConf()), ""));
}
newOptions.setContainerState(container.getContainerState());
newOptionsList.add(newOptions);
}
return newOptionsList;
}
private List<ContainerReport> getContainerReportsFromRunningApplication(
ContainerLogsRequest options) throws YarnException, IOException {
List<ContainerReport> reports = new ArrayList<ContainerReport>();
List<ApplicationAttemptReport> attempts =
yarnClient.getApplicationAttempts(options.getAppId());
Map<ContainerId, ContainerReport> containerMap = new TreeMap<
ContainerId, ContainerReport>();
for (ApplicationAttemptReport attempt : attempts) {
List<ContainerReport> containers = yarnClient.getContainers(
attempt.getApplicationAttemptId());
for (ContainerReport container : containers) {
if (!containerMap.containsKey(container.getContainerId())) {
containerMap.put(container.getContainerId(), container);
}
}
}
reports.addAll(containerMap.values());
return reports;
}
// filter the containerReports based on the nodeId and ContainerId
private List<ContainerReport> filterContainersInfo(
ContainerLogsRequest options, List<ContainerReport> containers) {
List<ContainerReport> filterReports = new ArrayList<ContainerReport>(
containers);
String nodeId = options.getNodeId();
boolean filterBasedOnNodeId = (nodeId != null && !nodeId.isEmpty());
String containerId = options.getContainerId();
boolean filterBasedOnContainerId = (containerId != null
&& !containerId.isEmpty());
if (filterBasedOnNodeId || filterBasedOnContainerId) {
// filter the reports based on the containerId and.or nodeId
for(ContainerReport report : containers) {
if (filterBasedOnContainerId) {
if (!report.getContainerId().toString()
.equalsIgnoreCase(containerId)) {
filterReports.remove(report);
}
}
if (filterBasedOnNodeId) {
if (!report.getAssignedNode().toString().equalsIgnoreCase(nodeId)) {
filterReports.remove(report);
}
}
}
}
return filterReports;
}
private int printContainerInfoFromRunningApplication(
ContainerLogsRequest options, LogCLIHelpers logCliHelper)
throws YarnException, IOException, ProcessingException,
IllegalStateException, JSONException {
String containerIdStr = options.getContainerId();
String nodeIdStr = options.getNodeId();
List<ContainerReport> reports =
getContainerReportsFromRunningApplication(options);
List<ContainerReport> filteredReports = filterContainersInfo(
options, reports);
if (filteredReports.isEmpty()) {
// if we specify the containerId as well as NodeAddress
String nodeHttpAddress = null;
if (options.getContainerId() != null
&& !options.getContainerId().isEmpty()) {
nodeHttpAddress = getNodeHttpAddressFromRMWebString(options);
}
if (nodeHttpAddress != null) {
outputContainerLogMeta(options.getContainerId(), options.getNodeId(),
nodeHttpAddress);
return 0;
} else {
int result = logCliHelper.printAContainerLogMetadata(
options, System.out, System.err);
if (result == -1) {
StringBuilder sb = new StringBuilder();
if (containerIdStr != null && !containerIdStr.isEmpty()) {
sb.append("Trying to get container with ContainerId: "
+ containerIdStr + "\n");
}
if (nodeIdStr != null && !nodeIdStr.isEmpty()) {
sb.append("Trying to get container from NodeManager: "
+ nodeIdStr + "\n");
}
sb.append("Can not find any matched containers for the application: "
+ options.getAppId());
System.err.println(sb.toString());
}
return result;
}
}
for (ContainerReport report : filteredReports) {
String nodeId = report.getAssignedNode().toString();
String nodeHttpAddress = report.getNodeHttpAddress().replaceFirst(
WebAppUtils.getHttpSchemePrefix(getConf()), "");
String containerId = report.getContainerId().toString();
outputContainerLogMeta(containerId, nodeId, nodeHttpAddress);
}
return 0;
}
private void outputContainerLogMeta(String containerId, String nodeId,
String nodeHttpAddress) throws IOException {
String containerString = String.format(
LogCLIHelpers.CONTAINER_ON_NODE_PATTERN, containerId, nodeId);
outStream.println(containerString);
outStream.println(StringUtils.repeat("=", containerString.length()));
outStream.printf(LogCLIHelpers.PER_LOG_FILE_INFO_PATTERN,
"LogFile", "LogLength", "LastModificationTime", "LogAggregationType");
outStream.println(StringUtils.repeat("=", containerString.length() * 2));
List<Pair<ContainerLogFileInfo, String>> infos = getContainerLogFiles(
getConf(), containerId, nodeHttpAddress);
for (Pair<ContainerLogFileInfo, String> info : infos) {
outStream.printf(LogCLIHelpers.PER_LOG_FILE_INFO_PATTERN,
info.getKey().getFileName(), info.getKey().getFileSize(),
info.getKey().getLastModifiedTime(), info.getValue());
}
}
@VisibleForTesting
public Set<String> getMatchedContainerLogFiles(ContainerLogsRequest request,
boolean useRegex, boolean ignoreSizeLimit) throws IOException {
// fetch all the log files for the container
// filter the log files based on the given -log_files pattern
List<Pair<ContainerLogFileInfo, String>> allLogFileInfos=
getContainerLogFiles(getConf(), request.getContainerId(),
request.getNodeHttpAddress());
List<ContainerLogFileInfo> fileNames = new ArrayList<
ContainerLogFileInfo>();
for (Pair<ContainerLogFileInfo, String> fileInfo : allLogFileInfos) {
fileNames.add(fileInfo.getKey());
}
return getMatchedLogFiles(request, fileNames,
useRegex, ignoreSizeLimit);
}
@VisibleForTesting
public Response getResponseFromNMWebService(Configuration conf,
Client webServiceClient, ContainerLogsRequest request, String logFile) {
return LogToolUtils.getResponseFromNMWebService(
conf, webServiceClient, request, logFile);
}
@VisibleForTesting
public String getNodeHttpAddressFromRMWebString(ContainerLogsRequest request)
throws ProcessingException, IllegalStateException, JSONException {
if (request.getNodeId() == null || request.getNodeId().isEmpty()) {
return null;
}
JSONObject nodeInfo = YarnWebServiceUtils
.getNodeInfoFromRMWebService(getConf(), request.getNodeId())
.getJSONObject("node");
return nodeInfo.has("nodeHTTPAddress") ?
nodeInfo.getString("nodeHTTPAddress") : null;
}
// Class to handle retry
private static final | LogsCLI |
java | elastic__elasticsearch | x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/content/ParserUtils.java | {
"start": 843,
"end": 7083
} | class ____ {
private ParserUtils() {}
public static boolean booleanValue(JsonParser p) throws IOException {
JsonToken token = p.currentToken();
if (token == JsonToken.VALUE_STRING) {
return Booleans.parseBoolean(p.getTextCharacters(), p.getTextOffset(), p.getTextLength(), false /* irrelevant */);
}
return p.getBooleanValue();
}
public static Integer intValue(JsonParser p) throws IOException {
JsonToken token = p.currentToken();
if (token == JsonToken.VALUE_STRING) {
String text = text(p);
double doubleValue = Double.parseDouble(text);
if (doubleValue < Integer.MIN_VALUE || doubleValue > Integer.MAX_VALUE) {
throw new IllegalArgumentException("Value [" + text + "] is out of range for an integer");
}
return (int) doubleValue;
}
return p.getIntValue();
}
public static String text(JsonParser p) throws IOException {
JsonToken current = p.currentToken();
if (current.isScalarValue()) {
return p.getText();
}
throw new IllegalStateException("Can't get text on a " + current + " at " + location(p));
}
public static Map<String, Object> map(JsonParser p) throws IOException {
return readMapSafe(p, HashMap::new);
}
public static Map<String, Object> mapOrdered(JsonParser p) throws IOException {
return readMapSafe(p, LinkedHashMap::new);
}
public static Map<String, Object> readMapSafe(JsonParser p, Supplier<Map<String, Object>> mapFactory) throws IOException {
final Map<String, Object> map = mapFactory.get();
return findNonEmptyMapStart(p) ? readMapEntries(p, mapFactory, map) : map;
}
private static boolean findNonEmptyMapStart(JsonParser parser) throws IOException {
JsonToken token = parser.currentToken();
if (token == null) {
token = parser.nextToken();
}
if (token == JsonToken.START_OBJECT) {
token = parser.nextToken();
}
return token == JsonToken.FIELD_NAME;
}
// Read a map without bounds checks from a parser that is assumed to be at the map's first field's name token
private static Map<String, Object> readMapEntries(JsonParser parser, Supplier<Map<String, Object>> mapFactory, Map<String, Object> map)
throws IOException {
assert parser.currentToken() == JsonToken.FIELD_NAME : "Expected field name but saw [" + parser.currentToken() + "]";
do {
// Must point to field name
String fieldName = parser.currentName();
// And then the value...
Object value = readValueUnsafe(parser.nextToken(), parser, mapFactory);
map.put(fieldName, value);
} while (parser.nextToken() == JsonToken.FIELD_NAME);
return map;
}
public static List<Object> list(JsonParser parser) throws IOException {
skipToListStart(parser);
return readListUnsafe(parser, HashMap::new);
}
public static List<Object> listOrderedMap(JsonParser parser) throws IOException {
skipToListStart(parser);
return readListUnsafe(parser, LinkedHashMap::new);
}
// Skips the current parser to the next array start. Assumes that the parser is either positioned before an array field's name token or
// on the start array token.
private static void skipToListStart(JsonParser parser) throws IOException {
JsonToken token = parser.currentToken();
if (token == null) {
token = parser.nextToken();
}
if (token == JsonToken.FIELD_NAME) {
token = parser.nextToken();
}
if (token != JsonToken.START_ARRAY) {
throw new ParseException(location(parser), "Failed to parse list: expecting " + JsonToken.START_ARRAY + " but got " + token);
}
}
private static Object readValueUnsafe(JsonToken currentToken, JsonParser parser, Supplier<Map<String, Object>> mapFactory)
throws IOException {
if (currentToken != parser.currentToken()) {
throw new ParseException(
"Supplied current token ["
+ currentToken
+ "] is different from actual parser current token ["
+ parser.currentToken()
+ "]"
);
}
switch (currentToken) {
case VALUE_STRING:
return text(parser);
case VALUE_NUMBER_FLOAT:
case VALUE_NUMBER_INT:
return parser.getNumberValue();
case VALUE_FALSE:
case VALUE_TRUE:
return parser.getBooleanValue();
case START_OBJECT: {
final Map<String, Object> map = mapFactory.get();
return parser.nextToken() != JsonToken.FIELD_NAME ? map : readMapEntries(parser, mapFactory, map);
}
case START_ARRAY:
return readListUnsafe(parser, mapFactory);
case VALUE_EMBEDDED_OBJECT:
return parser.getBinaryValue();
case VALUE_NULL:
default:
return null;
}
}
private static List<Object> readListUnsafe(JsonParser parser, Supplier<Map<String, Object>> mapFactory) throws IOException {
if (parser.currentToken() != JsonToken.START_ARRAY) {
throw new ParseException(location(parser), "Expected START_ARRAY but got [" + parser.currentToken() + "]");
}
ArrayList<Object> list = new ArrayList<>();
for (JsonToken token = parser.nextToken(); token != null && token != JsonToken.END_ARRAY; token = parser.nextToken()) {
list.add(readValueUnsafe(token, parser, mapFactory));
}
return list;
}
public static ContentLocation location(JsonParser p) {
return location(p.getTokenLocation());
}
public static ContentLocation location(JsonLocation tokenLocation) {
return tokenLocation != null
? new ContentLocation(tokenLocation.getLineNr(), tokenLocation.getColumnNr())
: ContentLocation.UNKNOWN;
}
}
| ParserUtils |
java | alibaba__nacos | core/src/main/java/com/alibaba/nacos/core/distributed/distro/task/DistroTaskEngineHolder.java | {
"start": 1270,
"end": 2440
} | class ____ implements DisposableBean {
private final DistroDelayTaskExecuteEngine delayTaskExecuteEngine = new DistroDelayTaskExecuteEngine();
private final DistroExecuteTaskExecuteEngine executeWorkersManager = new DistroExecuteTaskExecuteEngine();
public DistroTaskEngineHolder(DistroComponentHolder distroComponentHolder) {
DistroDelayTaskProcessor defaultDelayTaskProcessor = new DistroDelayTaskProcessor(this, distroComponentHolder);
delayTaskExecuteEngine.setDefaultTaskProcessor(defaultDelayTaskProcessor);
}
public DistroDelayTaskExecuteEngine getDelayTaskExecuteEngine() {
return delayTaskExecuteEngine;
}
public DistroExecuteTaskExecuteEngine getExecuteWorkersManager() {
return executeWorkersManager;
}
public void registerNacosTaskProcessor(Object key, NacosTaskProcessor nacosTaskProcessor) {
this.delayTaskExecuteEngine.addProcessor(key, nacosTaskProcessor);
}
@Override
public void destroy() throws Exception {
this.delayTaskExecuteEngine.shutdown();
this.executeWorkersManager.shutdown();
}
}
| DistroTaskEngineHolder |
java | spring-projects__spring-boot | module/spring-boot-webmvc/src/test/java/org/springframework/boot/webmvc/autoconfigure/actuate/web/CompositeHandlerExceptionResolverTests.java | {
"start": 3793,
"end": 4074
} | class ____ implements HandlerExceptionResolver {
@Override
public ModelAndView resolveException(HttpServletRequest request, HttpServletResponse response,
@Nullable Object handler, Exception ex) {
return new ModelAndView("test-view");
}
}
}
| TestHandlerExceptionResolver |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/Encryptor.java | {
"start": 1876,
"end": 2915
} | interface ____ with direct ByteBuffers.
* <p>
* This function does not always encrypt the entire buffer and may potentially
* need to be called multiple times to process an entire buffer. The object
* may hold the encryption context internally.
* <p>
* Some implementations may require sufficient space in the destination
* buffer to encrypt the entire input buffer.
* <p>
* Upon return, inBuffer.position() will be advanced by the number of bytes
* read and outBuffer.position() by bytes written. Implementations should
* not modify inBuffer.limit() and outBuffer.limit().
* <p>
* @param inBuffer a direct {@link ByteBuffer} to read from. inBuffer may
* not be null and inBuffer.remaining() must be > 0
* @param outBuffer a direct {@link ByteBuffer} to write to. outBuffer may
* not be null and outBuffer.remaining() must be > 0
* @throws IOException if encryption fails
*/
public void encrypt(ByteBuffer inBuffer, ByteBuffer outBuffer)
throws IOException;
}
| encrypting |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformCheckpointStatsTests.java | {
"start": 445,
"end": 1457
} | class ____ extends AbstractWireSerializingTestCase<TransformCheckpointStats> {
public static TransformCheckpointStats randomTransformCheckpointStats() {
return new TransformCheckpointStats(
randomLongBetween(1, 1_000_000),
TransformIndexerPositionTests.randomTransformIndexerPosition(),
randomBoolean() ? null : TransformProgressTests.randomTransformProgress(),
randomLongBetween(1, 1_000_000),
randomLongBetween(0, 1_000_000)
);
}
@Override
protected TransformCheckpointStats createTestInstance() {
return randomTransformCheckpointStats();
}
@Override
protected TransformCheckpointStats mutateInstance(TransformCheckpointStats instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
@Override
protected Reader<TransformCheckpointStats> instanceReader() {
return TransformCheckpointStats::new;
}
}
| TransformCheckpointStatsTests |
java | apache__camel | components/camel-telemetry-dev/src/test/java/org/apache/camel/telemetrydev/DisableEndpointTest.java | {
"start": 1375,
"end": 3868
} | class ____ extends TelemetryDevTracerTestSupport {
@Override
protected CamelContext createCamelContext() throws Exception {
TelemetryDevTracer tst = new TelemetryDevTracer();
tst.setTraceFormat("json");
tst.setTraceProcessors(true);
tst.setExcludePatterns("log*,to*,setVariable*");
CamelContext context = super.createCamelContext();
CamelContextAware.trySetCamelContext(tst, context);
tst.init(context);
return context;
}
@Test
void testProcessorsTraceRequest() throws IOException {
template.sendBody("direct:start", "my-body");
Map<String, DevTrace> traces = tracesFromLog();
assertEquals(1, traces.size());
checkTrace(traces.values().iterator().next());
}
@Test
void testExcludedVariableIsPresent() throws InterruptedException {
MockEndpoint endpoint = context().getEndpoint("mock:variable", MockEndpoint.class);
endpoint.expectedMessageCount(1);
template.sendBody("direct:variable", "Test Message");
endpoint.assertIsSatisfied();
Exchange first = endpoint.getReceivedExchanges().get(0);
String myVar = first.getVariable("myVar", String.class);
Assertions.assertEquals("testValue", myVar);
}
private void checkTrace(DevTrace trace) {
List<DevSpanAdapter> spans = trace.getSpans();
assertEquals(2, spans.size());
DevSpanAdapter testProducer = spans.get(0);
DevSpanAdapter direct = spans.get(1);
// Validate span completion
assertEquals("true", testProducer.getTag("isDone"));
assertEquals("true", direct.getTag("isDone"));
// Validate same trace
assertEquals(testProducer.getTag("traceid"), direct.getTag("traceid"));
// Validate hierarchy
assertNull(testProducer.getTag("parentSpan"));
assertEquals(testProducer.getTag("spanid"), direct.getTag("parentSpan"));
}
@Override
protected RoutesBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.routeId("start")
.log("A message")
.to("log:info");
from("direct:variable")
.setVariable("myVar", constant("testValue"))
.to("mock:variable");
}
};
}
}
| DisableEndpointTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/typeoverride/Entity.java | {
"start": 178,
"end": 508
} | class ____ {
private long id;
private String name;
public Entity() {
}
public Entity(String name) {
this.name = name;
}
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
| Entity |
java | apache__camel | components/camel-pqc/src/test/java/org/apache/camel/component/pqc/PQCBIKEGenerateEncapsulationAESNoAutowiredTest.java | {
"start": 1549,
"end": 4086
} | class ____ extends CamelTestSupport {
@EndpointInject("mock:encapsulate")
protected MockEndpoint resultEncapsulate;
@Produce("direct:encapsulate")
protected ProducerTemplate templateEncapsulate;
@EndpointInject("mock:extract")
protected MockEndpoint resultExtract;
public PQCBIKEGenerateEncapsulationAESNoAutowiredTest() throws NoSuchAlgorithmException {
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:encapsulate").to(
"pqc:keyenc?operation=generateSecretKeyEncapsulation&symmetricKeyAlgorithm=AES&keyEncapsulationAlgorithm=BIKE")
.to("mock:encapsulate")
.to("pqc:keyenc?operation=extractSecretKeyEncapsulation&symmetricKeyAlgorithm=AES&keyEncapsulationAlgorithm=BIKE")
.to("mock:extract");
}
};
}
@BeforeAll
public static void startup() throws Exception {
Security.addProvider(new BouncyCastleProvider());
Security.addProvider(new BouncyCastlePQCProvider());
}
@Test
void testSignAndVerify() throws Exception {
resultEncapsulate.expectedMessageCount(1);
resultExtract.expectedMessageCount(1);
templateEncapsulate.sendBody("Hello");
resultEncapsulate.assertIsSatisfied();
assertNotNull(resultEncapsulate.getExchanges().get(0).getMessage().getBody(SecretKeyWithEncapsulation.class));
assertEquals(PQCSymmetricAlgorithms.AES.getAlgorithm(),
resultEncapsulate.getExchanges().get(0).getMessage().getBody(SecretKeyWithEncapsulation.class).getAlgorithm());
SecretKeyWithEncapsulation secEncrypted
= resultEncapsulate.getExchanges().get(0).getMessage().getBody(SecretKeyWithEncapsulation.class);
assertNotNull(resultExtract.getExchanges().get(0).getMessage().getBody(SecretKeyWithEncapsulation.class));
assertEquals(PQCSymmetricAlgorithms.AES.getAlgorithm(),
resultExtract.getExchanges().get(0).getMessage().getBody(SecretKeyWithEncapsulation.class).getAlgorithm());
SecretKeyWithEncapsulation secEncryptedExtracted
= resultExtract.getExchanges().get(0).getMessage().getBody(SecretKeyWithEncapsulation.class);
assertTrue(Arrays.areEqual(secEncrypted.getEncoded(), secEncryptedExtracted.getEncoded()));
}
}
| PQCBIKEGenerateEncapsulationAESNoAutowiredTest |
java | quarkusio__quarkus | independent-projects/qute/core/src/test/java/io/quarkus/qute/FragmentTest.java | {
"start": 400,
"end": 7618
} | class ____ {
@Test
public void testSimpleFragment() {
Engine engine = Engine.builder().addDefaults().build();
Template template = engine
.parse("PREFIX {#fragment id='foo_and_bar'}{foo}{/} {#fragment another}{foo}{/}SUFFIX",
Variant.forContentType(Variant.TEXT_PLAIN), "fragments.html");
assertEquals("OK", template.getFragment("foo_and_bar").data("foo", "OK").render());
assertEquals("NOK", template.getFragment("another").data("foo", "NOK").render());
assertFalse(template.isFragment());
Fragment another = template.getFragment("another");
assertTrue(another.isFragment());
assertEquals("another", another.getId());
assertEquals(template.getFragment("another").getGeneratedId(), another.getGeneratedId());
assertEquals("fragments.html", template.getFragment("another").getOriginalTemplate().getId());
assertEquals(Set.of("foo_and_bar", "another"), template.getFragmentIds());
List<TemplateNode> anotherNodes = another.getNodes();
assertEquals(1, anotherNodes.size());
assertTrue(anotherNodes.get(0).isExpression());
}
@Test
public void testNestedFragment() {
Engine engine = Engine.builder().addDefaults().build();
Template template = engine
.parse("PREFIX {#fragment id=foo_and_bar}{foo}{#fragment another}{foo}{/}{/} SUFFIX",
Variant.forContentType(Variant.TEXT_PLAIN), "fragments.html");
assertEquals("OKOK", template.getFragment("foo_and_bar").data("foo", "OK").render());
assertEquals("NOK", template.getFragment("another").data("foo", "NOK").render());
assertEquals("NOK", template.getFragment("foo_and_bar").getFragment("another").data("foo", "NOK").render());
assertEquals("NOKNOK", template.getFragment("foo_and_bar").getFragment("another").getFragment("foo_and_bar")
.data("foo", "NOK").render());
}
@Test
public void testNonUniqueIds() {
Engine engine = Engine.builder().addDefaults().build();
TemplateException expected = assertThrows(TemplateException.class,
() -> engine.parse("{#fragment id=another}{foo}{/}{#fragment another}{foo}{/}", null, "bum.html"));
assertEquals(FragmentSectionHelper.Code.NON_UNIQUE_FRAGMENT_ID, expected.getCode());
assertEquals("Parser error in template [bum.html:1]: found a non-unique fragment identifier: [another]",
expected.getMessage());
}
@Test
public void testInvisibleFragment() {
Engine engine = Engine.builder().addDefaults().build();
Template foo = engine.parse(
"PREFIX::{#fragment foo _hidden}FOO{/fragment}::{#include $foo /}::{#include $foo /}", null, "foo");
assertEquals("PREFIX::::FOO::FOO", foo.render());
assertEquals("FOO", foo.getFragment("foo").render());
}
@Test
public void testFrgNamespace() {
Engine engine = Engine.builder()
.addDefaults()
.addNamespaceResolver(new FragmentNamespaceResolver())
.addValueResolver(new ReflectionValueResolver())
.build();
Template foo = engine.parse(
"PREFIX::{#fragment foo rendered=false}FOO{/fragment}::{frg:foo.toLowerCase}::{#include $foo /}", null, "foo");
assertEquals("PREFIX::::foo::FOO", foo.render());
// Fragment from another template
engine.putTemplate("bar", engine.parse("""
{#fragment barbar _hidden}
Barbar is here!
{/}
"""));
assertEquals("Barbar is here!", engine.parse("{frg:bar$barbar}").render().strip());
assertThrows(TemplateException.class, () -> engine.parse("{frg:nonexistent$barbar}").render());
}
@Test
public void testCapture() {
Engine engine = Engine.builder()
.addDefaults()
.addNamespaceResolver(new FragmentNamespaceResolver(FragmentNamespaceResolver.CAP))
.addValueResolver(new ReflectionValueResolver())
.build();
Template foo = engine.parse(
"PREFIX::{#capture foo}FOO{/capture}::{cap:foo.toLowerCase}::{#include $foo /}", null, "foo");
assertEquals("PREFIX::::foo::FOO", foo.render());
}
@Test
public void testCaptureArgs() {
Engine engine = Engine.builder()
.addDefaults()
.addNamespaceResolver(new FragmentNamespaceResolver(FragmentNamespaceResolver.CAPTURE))
.addNamespaceResolver(new NamedArgument.ParamNamespaceResolver())
.addValueResolver(new NamedArgument.SetValueResolver())
.addValueResolver(new ReflectionValueResolver())
.build();
Template foo = engine.parse(
"PREFIX::{#capture foo}{name} {surname}{/capture}::{capture:foo(param:name = 'Ondik',param:surname.set(mySurname)).toLowerCase}",
null, "foo");
assertEquals("PREFIX::::ondik kouba", foo.data("mySurname", "Kouba").render());
}
@Test
public void testInvalidId() {
Engine engine = Engine.builder().addDefaults().build();
TemplateException expected = assertThrows(TemplateException.class,
() -> engine.parse("{#fragment id='another and foo'}{/}", null, "bum.html"));
assertEquals(FragmentSectionHelper.Code.INVALID_FRAGMENT_ID, expected.getCode());
assertEquals(
"Parser error in template [bum.html:1]: found an invalid fragment identifier [another and foo] - an identifier can only consist of alphanumeric characters and underscores",
expected.getMessage());
}
@Test
public void testNestedFragmentRendered() {
Engine engine = Engine.builder().addDefaults().build();
Template alpha = engine.parse("""
OK
{#fragment id=\"nested\" rendered=false}
NOK
{/}
{#fragment id=\"visible\"}
01
{/fragment}
""");
engine.putTemplate("alpha", alpha);
assertEquals("OK01", alpha.render().replaceAll("\\s", ""));
assertEquals("NOK", alpha.getFragment("nested").render().trim());
Template bravo = engine.parse("""
{#include $nested}
{#fragment id=\"nested\" rendered=false}
OK
{/}
""");
assertEquals("OK", bravo.render().trim());
assertEquals("OK", bravo.getFragment("nested").render().trim());
assertEquals("NOK", engine.parse("{#include alpha$nested /}").render().trim());
Template charlie = engine.parse("{#include alpha /}");
assertEquals("OK01", charlie.render().replaceAll("\\s", ""));
Template delta = engine.parse("""
{#fragment id=\"nested\" rendered=false}
{#include alpha /}
{/}
""");
assertEquals("OK01", delta.getFragment("nested").render().replaceAll("\\s", ""));
}
}
| FragmentTest |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.