index int64 0 0 | repo_id stringlengths 26 205 | file_path stringlengths 51 246 | content stringlengths 8 433k | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/iceberg/spark/src/main/java/com/netflix/iceberg | Create_ds/iceberg/spark/src/main/java/com/netflix/iceberg/spark/SparkFilters.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.iceberg.spark;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.netflix.iceberg.Schema;
import com.netflix.iceberg.expressions.Binder;
import com.netflix.iceberg.expressions.BoundReference;
import com.netflix.iceberg.expressions.Expression;
import com.netflix.iceberg.expressions.Expression.Operation;
import com.netflix.iceberg.expressions.ExpressionVisitors;
import com.netflix.iceberg.expressions.Literal;
import org.apache.spark.sql.Column;
import org.apache.spark.sql.catalyst.expressions.And$;
import org.apache.spark.sql.catalyst.expressions.Not$;
import org.apache.spark.sql.catalyst.expressions.Or$;
import org.apache.spark.sql.catalyst.util.DateTimeUtils;
import org.apache.spark.sql.functions$;
import org.apache.spark.sql.sources.And;
import org.apache.spark.sql.sources.EqualNullSafe;
import org.apache.spark.sql.sources.EqualTo;
import org.apache.spark.sql.sources.Filter;
import org.apache.spark.sql.sources.GreaterThan;
import org.apache.spark.sql.sources.GreaterThanOrEqual;
import org.apache.spark.sql.sources.In;
import org.apache.spark.sql.sources.IsNotNull;
import org.apache.spark.sql.sources.IsNull;
import org.apache.spark.sql.sources.LessThan;
import org.apache.spark.sql.sources.LessThanOrEqual;
import org.apache.spark.sql.sources.Not;
import org.apache.spark.sql.sources.Or;
import java.sql.Date;
import java.sql.Timestamp;
import java.util.Map;
import static com.netflix.iceberg.expressions.ExpressionVisitors.visit;
import static com.netflix.iceberg.expressions.Expressions.alwaysFalse;
import static com.netflix.iceberg.expressions.Expressions.and;
import static com.netflix.iceberg.expressions.Expressions.equal;
import static com.netflix.iceberg.expressions.Expressions.greaterThan;
import static com.netflix.iceberg.expressions.Expressions.greaterThanOrEqual;
import static com.netflix.iceberg.expressions.Expressions.isNull;
import static com.netflix.iceberg.expressions.Expressions.lessThan;
import static com.netflix.iceberg.expressions.Expressions.lessThanOrEqual;
import static com.netflix.iceberg.expressions.Expressions.not;
import static com.netflix.iceberg.expressions.Expressions.notNull;
import static com.netflix.iceberg.expressions.Expressions.or;
/**
 * Helpers to convert Spark data source {@link Filter} instances to Iceberg
 * {@link Expression} trees for predicate push-down.
 */
public class SparkFilters {
  private SparkFilters() {
  }

  // maps Spark filter classes to Iceberg operations so convert can switch on the
  // operation instead of using a chain of instanceof checks
  private static final Map<Class<? extends Filter>, Operation> FILTERS = ImmutableMap
      .<Class<? extends Filter>, Operation>builder()
      .put(EqualTo.class, Operation.EQ)
      .put(EqualNullSafe.class, Operation.EQ)
      .put(GreaterThan.class, Operation.GT)
      .put(GreaterThanOrEqual.class, Operation.GT_EQ)
      .put(LessThan.class, Operation.LT)
      .put(LessThanOrEqual.class, Operation.LT_EQ)
      .put(In.class, Operation.IN)
      .put(IsNull.class, Operation.IS_NULL)
      .put(IsNotNull.class, Operation.NOT_NULL)
      .put(And.class, Operation.AND)
      .put(Or.class, Operation.OR)
      .put(Not.class, Operation.NOT)
      .build();

  /**
   * Converts a Spark {@link Filter} to an equivalent Iceberg {@link Expression}.
   *
   * @param filter a Spark data source filter
   * @return the equivalent Iceberg expression, or null if the filter cannot be converted
   */
  public static Expression convert(Filter filter) {
    // avoid using a chain of if instanceof statements by mapping to the expression enum.
    Operation op = FILTERS.get(filter.getClass());
    if (op == null) {
      return null; // unsupported filter class
    }

    switch (op) {
      case IS_NULL:
        IsNull isNullFilter = (IsNull) filter;
        return isNull(isNullFilter.attribute());

      case NOT_NULL:
        IsNotNull notNullFilter = (IsNotNull) filter;
        return notNull(notNullFilter.attribute());

      case LT:
        LessThan lt = (LessThan) filter;
        return lessThan(lt.attribute(), convertLiteral(lt.value()));

      case LT_EQ:
        LessThanOrEqual ltEq = (LessThanOrEqual) filter;
        return lessThanOrEqual(ltEq.attribute(), convertLiteral(ltEq.value()));

      case GT:
        GreaterThan gt = (GreaterThan) filter;
        return greaterThan(gt.attribute(), convertLiteral(gt.value()));

      case GT_EQ:
        GreaterThanOrEqual gtEq = (GreaterThanOrEqual) filter;
        return greaterThanOrEqual(gtEq.attribute(), convertLiteral(gtEq.value()));

      case EQ: // used for both eq and null-safe-eq
        if (filter instanceof EqualTo) {
          EqualTo eq = (EqualTo) filter;
          // comparison with null in normal equality is always null. this is probably a mistake.
          // use a message template so the string is only built when the check actually fails
          Preconditions.checkNotNull(eq.value(),
              "Expression is always false (eq is not null-safe): %s", filter);
          return equal(eq.attribute(), convertLiteral(eq.value()));
        } else {
          EqualNullSafe eq = (EqualNullSafe) filter;
          if (eq.value() == null) {
            // null-safe equality with null is equivalent to an is-null predicate
            return isNull(eq.attribute());
          } else {
            return equal(eq.attribute(), convertLiteral(eq.value()));
          }
        }

      case IN:
        In inFilter = (In) filter;
        // expand IN into a disjunction of equality predicates, starting from false
        Expression in = alwaysFalse();
        for (Object value : inFilter.values()) {
          in = or(in, equal(inFilter.attribute(), convertLiteral(value)));
        }
        return in;

      case NOT:
        Not notFilter = (Not) filter;
        Expression child = convert(notFilter.child());
        if (child != null) {
          return not(child);
        }
        return null; // the child filter could not be converted

      case AND: {
        And andFilter = (And) filter;
        Expression left = convert(andFilter.left());
        Expression right = convert(andFilter.right());
        if (left != null && right != null) {
          return and(left, right);
        }
        return null; // one side could not be converted
      }

      case OR: {
        Or orFilter = (Or) filter;
        Expression left = convert(orFilter.left());
        Expression right = convert(orFilter.right());
        if (left != null && right != null) {
          return or(left, right);
        }
        return null; // one side could not be converted
      }

      default:
        return null; // defensive: FILTERS should not map to other operations
    }
  }

  /**
   * Converts Spark literal values to the representation Iceberg expressions expect.
   * Timestamps and dates are converted to Spark's internal numeric encodings.
   */
  private static Object convertLiteral(Object value) {
    if (value instanceof Timestamp) {
      return DateTimeUtils.fromJavaTimestamp((Timestamp) value);
    } else if (value instanceof Date) {
      return DateTimeUtils.fromJavaDate((Date) value);
    }
    return value;
  }
}
| 6,600 |
0 | Create_ds/iceberg/spark/src/main/java/com/netflix/iceberg | Create_ds/iceberg/spark/src/main/java/com/netflix/iceberg/spark/PruneColumnsWithReordering.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.iceberg.spark;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.netflix.iceberg.Schema;
import com.netflix.iceberg.types.Type;
import com.netflix.iceberg.types.Type.TypeID;
import com.netflix.iceberg.types.TypeUtil;
import com.netflix.iceberg.types.Types;
import org.apache.spark.sql.types.ArrayType;
import org.apache.spark.sql.types.BinaryType;
import org.apache.spark.sql.types.BooleanType;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.DateType;
import org.apache.spark.sql.types.DecimalType;
import org.apache.spark.sql.types.DoubleType;
import org.apache.spark.sql.types.FloatType;
import org.apache.spark.sql.types.IntegerType;
import org.apache.spark.sql.types.LongType;
import org.apache.spark.sql.types.MapType;
import org.apache.spark.sql.types.StringType;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import org.apache.spark.sql.types.TimestampType;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Supplier;
/**
 * A schema-pruning visitor that projects an Iceberg schema using a requested Spark
 * {@link StructType}, reordering struct fields to match the requested field order.
 * <p>
 * Fields referenced by pushed-down filters ({@code filterRefs}) are projected even when they
 * are not present in the requested schema. Fields are resolved by name because Spark only
 * sees the current table schema and has no access to Iceberg field IDs.
 */
public class PruneColumnsWithReordering extends TypeUtil.CustomOrderSchemaVisitor<Type> {
  // the Spark schema requested by the scan
  private final StructType requestedType;
  // IDs of fields referenced by pushed-down filter expressions
  private final Set<Integer> filterRefs;
  // the Spark type corresponding to the Iceberg type currently being visited
  private DataType current = null;

  PruneColumnsWithReordering(StructType requestedType, Set<Integer> filterRefs) {
    this.requestedType = requestedType;
    this.filterRefs = filterRefs;
  }

  @Override
  public Type schema(Schema schema, Supplier<Type> structResult) {
    // seed traversal with the requested root struct; clear it afterwards so stale state
    // cannot leak into a later traversal
    this.current = requestedType;
    try {
      return structResult.get();
    } finally {
      this.current = null;
    }
  }

  @Override
  public Type struct(Types.StructType struct, Iterable<Type> fieldResults) {
    Preconditions.checkNotNull(struct, "Cannot prune null struct. Pruning must start with a schema.");
    Preconditions.checkArgument(current instanceof StructType, "Not a struct: %s", current);
    StructType s = (StructType) current;
    List<Types.NestedField> fields = struct.fields();
    List<Type> types = Lists.newArrayList(fieldResults);
    boolean changed = false;
    // use a LinkedHashMap to preserve the original order of filter fields that are not projected
    Map<String, Types.NestedField> projectedFields = Maps.newLinkedHashMap();
    for (int i = 0; i < fields.size(); i += 1) {
      Types.NestedField field = fields.get(i);
      Type type = types.get(i);
      if (type == null) {
        // the field was pruned by field()
        changed = true;
      } else if (field.type() == type) {
        // unchanged subtree: keep the original field
        projectedFields.put(field.name(), field);
      } else if (field.isOptional()) {
        changed = true;
        projectedFields.put(field.name(),
            Types.NestedField.optional(field.fieldId(), field.name(), type));
      } else {
        changed = true;
        projectedFields.put(field.name(),
            Types.NestedField.required(field.fieldId(), field.name(), type));
      }
    }
    // Construct a new struct with the projected struct's order
    boolean reordered = false;
    StructField[] requestedFields = s.fields();
    List<Types.NestedField> newFields = Lists.newArrayListWithExpectedSize(requestedFields.length);
    for (int i = 0; i < requestedFields.length; i += 1) {
      // fields are resolved by name because Spark only sees the current table schema.
      String name = requestedFields[i].name();
      if (!fields.get(i).name().equals(name)) {
        reordered = true;
      }
      // NOTE(review): if a requested name was pruned above (or is absent from the Iceberg
      // struct), remove() returns null and a null entry is added — presumably every requested
      // field is always projected by field(); confirm against the visitor contract.
      newFields.add(projectedFields.remove(name));
    }
    // Add remaining filter fields that were not explicitly projected
    if (!projectedFields.isEmpty()) {
      newFields.addAll(projectedFields.values());
      changed = true; // order probably changed
    }
    if (reordered || changed) {
      return Types.StructType.of(newFields);
    }
    return struct;
  }

  @Override
  public Type field(Types.NestedField field, Supplier<Type> fieldResult) {
    Preconditions.checkArgument(current instanceof StructType, "Not a struct: %s", current);
    StructType struct = (StructType) current;
    // fields are resolved by name because Spark only sees the current table schema.
    if (struct.getFieldIndex(field.name()).isEmpty()) {
      // make sure that filter fields are projected even if they aren't in the requested schema.
      if (filterRefs.contains(field.fieldId())) {
        return field.type();
      }
      // not requested and not needed by a filter: prune
      return null;
    }
    int fieldIndex = struct.fieldIndex(field.name());
    StructField f = struct.fields()[fieldIndex];
    // fails when Spark requests a non-nullable field for an optional Iceberg field
    Preconditions.checkArgument(f.nullable() || field.isRequired(),
        "Cannot project an optional field as non-null: %s", field.name());
    // descend with the requested Spark type for this field
    this.current = f.dataType();
    try {
      return fieldResult.get();
    } catch (IllegalArgumentException e) {
      // wrap nested projection failures with the field name for context
      throw new IllegalArgumentException(
          "Invalid projection for field " + field.name() + ": " + e.getMessage(), e);
    } finally {
      // restore the parent struct type for the next sibling field
      this.current = struct;
    }
  }

  @Override
  public Type list(Types.ListType list, Supplier<Type> elementResult) {
    Preconditions.checkArgument(current instanceof ArrayType, "Not an array: %s", current);
    ArrayType array = (ArrayType) current;
    // an array that forbids nulls cannot be projected from optional elements
    Preconditions.checkArgument(array.containsNull() || !list.isElementOptional(),
        "Cannot project an array of optional elements as required elements: %s", array);
    this.current = array.elementType();
    try {
      Type elementType = elementResult.get();
      if (list.elementType() == elementType) {
        return list;
      }
      // must be a projected element type, create a new list
      if (list.isElementOptional()) {
        return Types.ListType.ofOptional(list.elementId(), elementType);
      } else {
        return Types.ListType.ofRequired(list.elementId(), elementType);
      }
    } finally {
      this.current = array;
    }
  }

  @Override
  public Type map(Types.MapType map, Supplier<Type> keyResult, Supplier<Type> valueResult) {
    Preconditions.checkArgument(current instanceof MapType, "Not a map: %s", current);
    MapType m = (MapType) current;
    Preconditions.checkArgument(m.valueContainsNull() || !map.isValueOptional(),
        "Cannot project a map of optional values as required values: %s", map);
    // map keys are not pruned here; only string keys are accepted
    Preconditions.checkArgument(StringType.class.isInstance(m.keyType()),
        "Invalid map key type (not string): %s", m.keyType());
    this.current = m.valueType();
    try {
      Type valueType = valueResult.get();
      if (map.valueType() == valueType) {
        return map;
      }
      if (map.isValueOptional()) {
        return Types.MapType.ofOptional(map.keyId(), map.valueId(), map.keyType(), valueType);
      } else {
        return Types.MapType.ofRequired(map.keyId(), map.valueId(), map.keyType(), valueType);
      }
    } finally {
      this.current = m;
    }
  }

  @Override
  public Type primitive(Type.PrimitiveType primitive) {
    Class<? extends DataType> expectedType = TYPES.get(primitive.typeId());
    Preconditions.checkArgument(expectedType != null && expectedType.isInstance(current),
        "Cannot project %s to incompatible type: %s", primitive, current);
    // additional checks based on type
    switch (primitive.typeId()) {
      case DECIMAL:
        Types.DecimalType decimal = (Types.DecimalType) primitive;
        DecimalType d = (DecimalType) current;
        // scale must match exactly; precision may only widen
        Preconditions.checkArgument(d.scale() == decimal.scale(),
            "Cannot project decimal with incompatible scale: %s != %s", d.scale(), decimal.scale());
        Preconditions.checkArgument(d.precision() >= decimal.precision(),
            "Cannot project decimal with incompatible precision: %s < %s",
            d.precision(), decimal.precision());
        break;
      case TIMESTAMP:
        Types.TimestampType timestamp = (Types.TimestampType) primitive;
        // Spark's TimestampType is adjusted to UTC; plain timestamps cannot be projected to it
        Preconditions.checkArgument(timestamp.shouldAdjustToUTC(),
            "Cannot project timestamp (without time zone) as timestamptz (with time zone)");
        break;
      default:
    }
    return primitive;
  }

  // expected Spark type class for each Iceberg primitive type ID
  private static final Map<TypeID, Class<? extends DataType>> TYPES = ImmutableMap
      .<TypeID, Class<? extends DataType>>builder()
      .put(TypeID.BOOLEAN, BooleanType.class)
      .put(TypeID.INTEGER, IntegerType.class)
      .put(TypeID.LONG, LongType.class)
      .put(TypeID.FLOAT, FloatType.class)
      .put(TypeID.DOUBLE, DoubleType.class)
      .put(TypeID.DATE, DateType.class)
      .put(TypeID.TIMESTAMP, TimestampType.class)
      .put(TypeID.DECIMAL, DecimalType.class)
      .put(TypeID.UUID, StringType.class)
      .put(TypeID.STRING, StringType.class)
      .put(TypeID.FIXED, BinaryType.class)
      .put(TypeID.BINARY, BinaryType.class)
      .build();
}
| 6,601 |
0 | Create_ds/iceberg/spark/src/main/java/com/netflix/iceberg | Create_ds/iceberg/spark/src/main/java/com/netflix/iceberg/spark/FixupTypes.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.iceberg.spark;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.netflix.iceberg.Schema;
import com.netflix.iceberg.types.Type;
import com.netflix.iceberg.types.TypeUtil;
import com.netflix.iceberg.types.Types;
import java.util.List;
import java.util.function.Supplier;
/**
* This is used to fix primitive types to match a table schema. Some types, like binary and fixed,
* are converted to the same Spark type. Conversion back can produce only one, which may not be
* correct. This uses a reference schema to override types that were lost in round-trip conversion.
*/
/**
 * This is used to fix primitive types to match a table schema. Some types, like binary and fixed,
 * are converted to the same Spark type. Conversion back can produce only one, which may not be
 * correct. This uses a reference schema to override types that were lost in round-trip conversion.
 */
class FixupTypes extends TypeUtil.CustomOrderSchemaVisitor<Type> {
  private final Schema referenceSchema;
  // the reference type corresponding to the type currently being visited
  private Type sourceType;

  /**
   * Returns a copy of schema with lossy primitive conversions corrected using referenceSchema.
   */
  static Schema fixup(Schema schema, Schema referenceSchema) {
    return new Schema(TypeUtil.visit(schema,
        new FixupTypes(referenceSchema)).asStructType().fields());
  }

  private FixupTypes(Schema referenceSchema) {
    this.referenceSchema = referenceSchema;
    this.sourceType = referenceSchema.asStruct();
  }

  @Override
  public Type schema(Schema schema, Supplier<Type> future) {
    // reset to the reference root in case this visitor is reused
    this.sourceType = referenceSchema.asStruct();
    return future.get();
  }

  @Override
  public Type struct(Types.StructType struct, Iterable<Type> fieldTypes) {
    // use a message template (consistent with the other visitors) so the error string is
    // built only when the check fails
    Preconditions.checkArgument(sourceType.isStructType(), "Not a struct: %s", sourceType);
    List<Types.NestedField> fields = struct.fields();
    int length = fields.size();
    List<Type> types = Lists.newArrayList(fieldTypes);
    List<Types.NestedField> newFields = Lists.newArrayListWithExpectedSize(length);
    boolean hasChange = false;
    for (int i = 0; i < length; i += 1) {
      Types.NestedField field = fields.get(i);
      Type resultType = types.get(i);
      if (field.type() == resultType) {
        // nothing was fixed below this field: keep it
        newFields.add(field);
      } else if (field.isRequired()) {
        hasChange = true;
        newFields.add(Types.NestedField.required(field.fieldId(), field.name(), resultType));
      } else {
        hasChange = true;
        newFields.add(Types.NestedField.optional(field.fieldId(), field.name(), resultType));
      }
    }
    if (hasChange) {
      return Types.StructType.of(newFields);
    }
    return struct;
  }

  @Override
  public Type field(Types.NestedField field, Supplier<Type> future) {
    Preconditions.checkArgument(sourceType.isStructType(), "Not a struct: %s", sourceType);
    Types.StructType sourceStruct = sourceType.asStructType();
    // fields are matched by ID; fail with a clear message instead of a bare NPE when the
    // reference schema does not contain this field
    Types.NestedField sourceField = sourceStruct.field(field.fieldId());
    Preconditions.checkNotNull(sourceField,
        "Field %s (id %s) not found in reference schema", field.name(), field.fieldId());
    this.sourceType = sourceField.type();
    try {
      return future.get();
    } finally {
      // restore the parent struct for the next sibling
      this.sourceType = sourceStruct;
    }
  }

  @Override
  public Type list(Types.ListType list, Supplier<Type> elementTypeFuture) {
    Preconditions.checkArgument(sourceType.isListType(), "Not a list: %s", sourceType);
    Types.ListType sourceList = sourceType.asListType();
    this.sourceType = sourceList.elementType();
    try {
      Type elementType = elementTypeFuture.get();
      if (list.elementType() == elementType) {
        return list;
      }
      // the element type was fixed: rebuild the list, preserving optionality
      if (list.isElementOptional()) {
        return Types.ListType.ofOptional(list.elementId(), elementType);
      } else {
        return Types.ListType.ofRequired(list.elementId(), elementType);
      }
    } finally {
      this.sourceType = sourceList;
    }
  }

  @Override
  public Type map(Types.MapType map, Supplier<Type> keyTypeFuture, Supplier<Type> valueTypeFuture) {
    Preconditions.checkArgument(sourceType.isMapType(), "Not a map: %s", sourceType);
    Types.MapType sourceMap = sourceType.asMapType();
    try {
      // visit the key and value in order, pointing sourceType at each reference type
      this.sourceType = sourceMap.keyType();
      Type keyType = keyTypeFuture.get();
      this.sourceType = sourceMap.valueType();
      Type valueType = valueTypeFuture.get();
      if (map.keyType() == keyType && map.valueType() == valueType) {
        return map;
      }
      // a key or value type was fixed: rebuild the map, preserving optionality
      if (map.isValueOptional()) {
        return Types.MapType.ofOptional(map.keyId(), map.valueId(), keyType, valueType);
      } else {
        return Types.MapType.ofRequired(map.keyId(), map.valueId(), keyType, valueType);
      }
    } finally {
      this.sourceType = sourceMap;
    }
  }

  @Override
  public Type primitive(Type.PrimitiveType primitive) {
    if (sourceType.equals(primitive)) {
      return primitive; // already correct
    }
    // string and binary are the lossy round-trip conversions: restore UUID and fixed from
    // the reference schema when they were flattened by the Spark conversion
    switch (primitive.typeId()) {
      case STRING:
        if (sourceType.typeId() == Type.TypeID.UUID) {
          return sourceType;
        }
        break;
      case BINARY:
        if (sourceType.typeId() == Type.TypeID.FIXED) {
          return sourceType;
        }
        break;
      default:
    }
    // nothing to fix up, let validation catch promotion errors
    return primitive;
  }
}
| 6,602 |
0 | Create_ds/iceberg/spark/src/main/java/com/netflix/iceberg | Create_ds/iceberg/spark/src/main/java/com/netflix/iceberg/spark/PruneColumnsWithoutReordering.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.iceberg.spark;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.netflix.iceberg.Schema;
import com.netflix.iceberg.types.Type;
import com.netflix.iceberg.types.Type.TypeID;
import com.netflix.iceberg.types.TypeUtil;
import com.netflix.iceberg.types.Types;
import org.apache.spark.sql.types.ArrayType;
import org.apache.spark.sql.types.BinaryType;
import org.apache.spark.sql.types.BooleanType;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.DateType;
import org.apache.spark.sql.types.DecimalType;
import org.apache.spark.sql.types.DoubleType;
import org.apache.spark.sql.types.FloatType;
import org.apache.spark.sql.types.IntegerType;
import org.apache.spark.sql.types.LongType;
import org.apache.spark.sql.types.MapType;
import org.apache.spark.sql.types.StringType;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import org.apache.spark.sql.types.TimestampType;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Supplier;
/**
 * A schema-pruning visitor that projects an Iceberg schema using a requested Spark
 * {@link StructType}, keeping struct fields in their original schema order.
 * <p>
 * Fields referenced by pushed-down filters ({@code filterRefs}) are projected even when they
 * are not present in the requested schema. Fields are resolved by name because Spark only
 * sees the current table schema and has no access to Iceberg field IDs.
 */
public class PruneColumnsWithoutReordering extends TypeUtil.CustomOrderSchemaVisitor<Type> {
  // the Spark schema requested by the scan
  private final StructType requestedType;
  // IDs of fields referenced by pushed-down filter expressions
  private final Set<Integer> filterRefs;
  // the Spark type corresponding to the Iceberg type currently being visited
  private DataType current = null;

  PruneColumnsWithoutReordering(StructType requestedType, Set<Integer> filterRefs) {
    this.requestedType = requestedType;
    this.filterRefs = filterRefs;
  }

  @Override
  public Type schema(Schema schema, Supplier<Type> structResult) {
    // seed traversal with the requested root struct; clear it afterwards so stale state
    // cannot leak into a later traversal
    this.current = requestedType;
    try {
      return structResult.get();
    } finally {
      this.current = null;
    }
  }

  @Override
  public Type struct(Types.StructType struct, Iterable<Type> fieldResults) {
    Preconditions.checkNotNull(struct, "Cannot prune null struct. Pruning must start with a schema.");
    Preconditions.checkArgument(current instanceof StructType, "Not a struct: %s", current);
    List<Types.NestedField> fields = struct.fields();
    List<Type> types = Lists.newArrayList(fieldResults);
    boolean changed = false;
    List<Types.NestedField> newFields = Lists.newArrayListWithExpectedSize(types.size());
    for (int i = 0; i < fields.size(); i += 1) {
      Types.NestedField field = fields.get(i);
      Type type = types.get(i);
      if (type == null) {
        // the field was pruned by field()
        changed = true;
      } else if (field.type() == type) {
        // unchanged subtree: keep the original field
        newFields.add(field);
      } else if (field.isOptional()) {
        changed = true;
        newFields.add(Types.NestedField.optional(field.fieldId(), field.name(), type));
      } else {
        changed = true;
        newFields.add(Types.NestedField.required(field.fieldId(), field.name(), type));
      }
    }
    if (changed) {
      return Types.StructType.of(newFields);
    }
    return struct;
  }

  @Override
  public Type field(Types.NestedField field, Supplier<Type> fieldResult) {
    Preconditions.checkArgument(current instanceof StructType, "Not a struct: %s", current);
    StructType struct = (StructType) current;
    // fields are resolved by name because Spark only sees the current table schema.
    if (struct.getFieldIndex(field.name()).isEmpty()) {
      // make sure that filter fields are projected even if they aren't in the requested schema.
      if (filterRefs.contains(field.fieldId())) {
        return field.type();
      }
      // not requested and not needed by a filter: prune
      return null;
    }
    int fieldIndex = struct.fieldIndex(field.name());
    StructField f = struct.fields()[fieldIndex];
    // fails when Spark requests a non-nullable field for an optional Iceberg field
    Preconditions.checkArgument(f.nullable() || field.isRequired(),
        "Cannot project an optional field as non-null: %s", field.name());
    // descend with the requested Spark type for this field
    this.current = f.dataType();
    try {
      return fieldResult.get();
    } catch (IllegalArgumentException e) {
      // wrap nested projection failures with the field name for context
      throw new IllegalArgumentException(
          "Invalid projection for field " + field.name() + ": " + e.getMessage(), e);
    } finally {
      // restore the parent struct type for the next sibling field
      this.current = struct;
    }
  }

  @Override
  public Type list(Types.ListType list, Supplier<Type> elementResult) {
    Preconditions.checkArgument(current instanceof ArrayType, "Not an array: %s", current);
    ArrayType array = (ArrayType) current;
    // an array that forbids nulls cannot be projected from optional elements
    Preconditions.checkArgument(array.containsNull() || !list.isElementOptional(),
        "Cannot project an array of optional elements as required elements: %s", array);
    this.current = array.elementType();
    try {
      Type elementType = elementResult.get();
      if (list.elementType() == elementType) {
        return list;
      }
      // must be a projected element type, create a new list
      if (list.isElementOptional()) {
        return Types.ListType.ofOptional(list.elementId(), elementType);
      } else {
        return Types.ListType.ofRequired(list.elementId(), elementType);
      }
    } finally {
      this.current = array;
    }
  }

  @Override
  public Type map(Types.MapType map, Supplier<Type> keyResult, Supplier<Type> valueResult) {
    Preconditions.checkArgument(current instanceof MapType, "Not a map: %s", current);
    MapType m = (MapType) current;
    Preconditions.checkArgument(m.valueContainsNull() || !map.isValueOptional(),
        "Cannot project a map of optional values as required values: %s", map);
    // NOTE(review): unlike PruneColumnsWithReordering, this visitor does not check that the
    // map key type is string — confirm whether that check was intentionally omitted here.
    this.current = m.valueType();
    try {
      Type valueType = valueResult.get();
      if (map.valueType() == valueType) {
        return map;
      }
      if (map.isValueOptional()) {
        return Types.MapType.ofOptional(map.keyId(), map.valueId(), map.keyType(), valueType);
      } else {
        return Types.MapType.ofRequired(map.keyId(), map.valueId(), map.keyType(), valueType);
      }
    } finally {
      this.current = m;
    }
  }

  @Override
  public Type primitive(Type.PrimitiveType primitive) {
    Class<? extends DataType> expectedType = TYPES.get(primitive.typeId());
    Preconditions.checkArgument(expectedType != null && expectedType.isInstance(current),
        "Cannot project %s to incompatible type: %s", primitive, current);
    // additional checks based on type
    switch (primitive.typeId()) {
      case DECIMAL:
        Types.DecimalType decimal = (Types.DecimalType) primitive;
        DecimalType d = (DecimalType) current;
        // scale must match exactly; precision may only widen
        Preconditions.checkArgument(d.scale() == decimal.scale(),
            "Cannot project decimal with incompatible scale: %s != %s", d.scale(), decimal.scale());
        Preconditions.checkArgument(d.precision() >= decimal.precision(),
            "Cannot project decimal with incompatible precision: %s < %s",
            d.precision(), decimal.precision());
        break;
      case TIMESTAMP:
        Types.TimestampType timestamp = (Types.TimestampType) primitive;
        // Spark's TimestampType is adjusted to UTC; plain timestamps cannot be projected to it
        Preconditions.checkArgument(timestamp.shouldAdjustToUTC(),
            "Cannot project timestamp (without time zone) as timestamptz (with time zone)");
        break;
      default:
    }
    return primitive;
  }

  // expected Spark type class for each Iceberg primitive type ID
  private static final Map<TypeID, Class<? extends DataType>> TYPES = ImmutableMap
      .<TypeID, Class<? extends DataType>>builder()
      .put(TypeID.BOOLEAN, BooleanType.class)
      .put(TypeID.INTEGER, IntegerType.class)
      .put(TypeID.LONG, LongType.class)
      .put(TypeID.FLOAT, FloatType.class)
      .put(TypeID.DOUBLE, DoubleType.class)
      .put(TypeID.DATE, DateType.class)
      .put(TypeID.TIMESTAMP, TimestampType.class)
      .put(TypeID.DECIMAL, DecimalType.class)
      .put(TypeID.UUID, StringType.class)
      .put(TypeID.STRING, StringType.class)
      .put(TypeID.FIXED, BinaryType.class)
      .put(TypeID.BINARY, BinaryType.class)
      .build();
}
| 6,603 |
0 | Create_ds/iceberg/spark/src/main/java/com/netflix/iceberg | Create_ds/iceberg/spark/src/main/java/com/netflix/iceberg/spark/TypeToSparkType.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.iceberg.spark;
import com.google.common.collect.Lists;
import com.netflix.iceberg.Schema;
import com.netflix.iceberg.types.Type;
import com.netflix.iceberg.types.TypeUtil;
import com.netflix.iceberg.types.Types;
import org.apache.spark.sql.types.ArrayType$;
import org.apache.spark.sql.types.BinaryType$;
import org.apache.spark.sql.types.BooleanType$;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.DateType$;
import org.apache.spark.sql.types.DecimalType$;
import org.apache.spark.sql.types.DoubleType$;
import org.apache.spark.sql.types.FloatType$;
import org.apache.spark.sql.types.IntegerType$;
import org.apache.spark.sql.types.LongType$;
import org.apache.spark.sql.types.MapType$;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.StringType$;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType$;
import org.apache.spark.sql.types.TimestampType$;
import java.util.List;
/**
 * Converts Iceberg types to the corresponding Spark SQL {@link DataType}s.
 */
class TypeToSparkType extends TypeUtil.SchemaVisitor<DataType> {
  TypeToSparkType() {
  }

  @Override
  public DataType schema(Schema schema, DataType structType) {
    // the converted schema is just the converted root struct
    return structType;
  }

  @Override
  public DataType struct(Types.StructType struct, List<DataType> fieldResults) {
    List<Types.NestedField> fields = struct.fields();
    List<StructField> sparkFields = Lists.newArrayListWithExpectedSize(fieldResults.size());
    int index = 0;
    for (Types.NestedField field : fields) {
      DataType convertedType = fieldResults.get(index);
      // optional Iceberg fields become nullable Spark fields
      sparkFields.add(
          StructField.apply(field.name(), convertedType, field.isOptional(), Metadata.empty()));
      index += 1;
    }
    return StructType$.MODULE$.apply(sparkFields);
  }

  @Override
  public DataType field(Types.NestedField field, DataType fieldResult) {
    // nothing field-specific to add; nullability is handled in struct()
    return fieldResult;
  }

  @Override
  public DataType list(Types.ListType list, DataType elementResult) {
    return ArrayType$.MODULE$.apply(elementResult, list.isElementOptional());
  }

  @Override
  public DataType map(Types.MapType map, DataType keyResult, DataType valueResult) {
    return MapType$.MODULE$.apply(keyResult, valueResult, map.isValueOptional());
  }

  @Override
  public DataType primitive(Type.PrimitiveType primitive) {
    switch (primitive.typeId()) {
      case BOOLEAN:
        return BooleanType$.MODULE$;
      case INTEGER:
        return IntegerType$.MODULE$;
      case LONG:
        return LongType$.MODULE$;
      case FLOAT:
        return FloatType$.MODULE$;
      case DOUBLE:
        return DoubleType$.MODULE$;
      case DATE:
        return DateType$.MODULE$;
      case TIME:
        throw new UnsupportedOperationException(
            "Spark does not support time fields");
      case TIMESTAMP:
        Types.TimestampType ts = (Types.TimestampType) primitive;
        if (ts.shouldAdjustToUTC()) {
          return TimestampType$.MODULE$;
        }
        throw new UnsupportedOperationException(
            "Spark does not support timestamp without time zone fields");
      case STRING:
      case UUID:
        // UUIDs are represented as strings in Spark
        return StringType$.MODULE$;
      case FIXED:
      case BINARY:
        // both fixed and binary map to Spark's binary type
        return BinaryType$.MODULE$;
      case DECIMAL:
        Types.DecimalType dec = (Types.DecimalType) primitive;
        return DecimalType$.MODULE$.apply(dec.precision(), dec.scale());
      default:
        throw new UnsupportedOperationException(
            "Cannot convert unknown type to Spark: " + primitive);
    }
  }
}
| 6,604 |
0 | Create_ds/iceberg/spark/src/main/java/com/netflix/iceberg | Create_ds/iceberg/spark/src/main/java/com/netflix/iceberg/spark/SparkTypeVisitor.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.iceberg.spark;
import com.google.common.collect.Lists;
import org.apache.spark.sql.types.ArrayType;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.MapType;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import org.apache.spark.sql.types.UserDefinedType;
import java.util.List;
/**
 * Generic visitor over Spark {@link DataType} trees.
 * <p>
 * {@link #visit(DataType, SparkTypeVisitor)} walks the type depth-first, calling the
 * per-shape callbacks with the already-visited results of child types. The default
 * callbacks return null, so subclasses only override what they need.
 * <p>
 * User-defined types are rejected.
 *
 * @param <T> result type produced by the visitor callbacks
 */
class SparkTypeVisitor<T> {
  static <T> T visit(DataType type, SparkTypeVisitor<T> visitor) {
    if (type instanceof StructType) {
      StructType struct = (StructType) type;
      StructField[] structFields = struct.fields();
      List<T> results = Lists.newArrayListWithExpectedSize(structFields.length);

      // visit each field's type first, then let the visitor combine field results
      for (StructField structField : structFields) {
        T fieldTypeResult = visit(structField.dataType(), visitor);
        results.add(visitor.field(structField, fieldTypeResult));
      }

      return visitor.struct(struct, results);

    } else if (type instanceof MapType) {
      MapType map = (MapType) type;
      T keyResult = visit(map.keyType(), visitor);
      T valueResult = visit(map.valueType(), visitor);
      return visitor.map(map, keyResult, valueResult);

    } else if (type instanceof ArrayType) {
      ArrayType array = (ArrayType) type;
      return visitor.array(array, visit(array.elementType(), visitor));

    } else if (type instanceof UserDefinedType) {
      throw new UnsupportedOperationException(
          "User-defined types are not supported");

    } else {
      // any remaining type is atomic (boolean, numeric, string, binary, ...)
      return visitor.atomic(type);
    }
  }

  public T struct(StructType struct, List<T> fieldResults) {
    return null;
  }

  public T field(StructField field, T typeResult) {
    return null;
  }

  public T array(ArrayType array, T elementResult) {
    return null;
  }

  public T map(MapType map, T keyResult, T valueResult) {
    return null;
  }

  public T atomic(DataType atomic) {
    return null;
  }
}
| 6,605 |
0 | Create_ds/iceberg/spark/src/main/java/com/netflix/iceberg | Create_ds/iceberg/spark/src/main/java/com/netflix/iceberg/spark/SparkTypeToType.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.iceberg.spark;
import com.google.common.collect.Lists;
import com.netflix.iceberg.types.Type;
import com.netflix.iceberg.types.Types;
import org.apache.spark.sql.types.ArrayType;
import org.apache.spark.sql.types.BinaryType;
import org.apache.spark.sql.types.BooleanType;
import org.apache.spark.sql.types.ByteType;
import org.apache.spark.sql.types.CharType;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.DateType;
import org.apache.spark.sql.types.DecimalType;
import org.apache.spark.sql.types.DoubleType;
import org.apache.spark.sql.types.FloatType;
import org.apache.spark.sql.types.IntegerType;
import org.apache.spark.sql.types.LongType;
import org.apache.spark.sql.types.MapType;
import org.apache.spark.sql.types.ShortType;
import org.apache.spark.sql.types.StringType;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import org.apache.spark.sql.types.TimestampType;
import org.apache.spark.sql.types.VarcharType;
import java.util.List;
/**
 * Converts a Spark type tree to an Iceberg {@link Type}, assigning fresh field ids.
 * <p>
 * When constructed with a root struct, the root's fields receive their ordinal
 * positions as ids and all nested fields receive ids starting after the root's
 * field count, handed out in visitation order. Id assignment is therefore
 * order-sensitive: each callback consumes ids exactly once per id it needs.
 */
class SparkTypeToType extends SparkTypeVisitor<Type> {
  private final StructType root;
  private int nextId = 0;

  SparkTypeToType() {
    this.root = null;
  }

  SparkTypeToType(StructType root) {
    this.root = root;
    // the root struct's fields use the first ids, so nested ids start after them
    this.nextId = root.fields().length;
  }

  /** Hands out the next unused field id. */
  private int getNextId() {
    int next = nextId;
    nextId += 1;
    return next;
  }

  @Override
  public Type struct(StructType struct, List<Type> types) {
    // identity comparison: only the exact root instance gets ordinal ids
    boolean isRoot = root == struct;
    StructField[] structFields = struct.fields();
    List<Types.NestedField> converted = Lists.newArrayListWithExpectedSize(structFields.length);

    for (int pos = 0; pos < structFields.length; pos += 1) {
      StructField structField = structFields[pos];
      Type fieldType = types.get(pos);
      // root fields are identified by ordinal; nested fields take the next free id
      int id = isRoot ? pos : getNextId();

      converted.add(structField.nullable()
          ? Types.NestedField.optional(id, structField.name(), fieldType)
          : Types.NestedField.required(id, structField.name(), fieldType));
    }

    return Types.StructType.of(converted);
  }

  @Override
  public Type field(StructField field, Type typeResult) {
    return typeResult;
  }

  @Override
  public Type array(ArrayType array, Type elementType) {
    int elementId = getNextId();
    return array.containsNull()
        ? Types.ListType.ofOptional(elementId, elementType)
        : Types.ListType.ofRequired(elementId, elementType);
  }

  @Override
  public Type map(MapType map, Type keyType, Type valueType) {
    // key id is assigned before value id, matching left-to-right evaluation
    int keyId = getNextId();
    int valueId = getNextId();
    return map.valueContainsNull()
        ? Types.MapType.ofOptional(keyId, valueId, keyType, valueType)
        : Types.MapType.ofRequired(keyId, valueId, keyType, valueType);
  }

  @Override
  public Type atomic(DataType atomic) {
    if (atomic instanceof BooleanType) {
      return Types.BooleanType.get();

    } else if (
        atomic instanceof IntegerType ||
        atomic instanceof ShortType ||
        atomic instanceof ByteType) {
      // Iceberg has no short/byte types; widen to int
      return Types.IntegerType.get();

    } else if (atomic instanceof LongType) {
      return Types.LongType.get();

    } else if (atomic instanceof FloatType) {
      return Types.FloatType.get();

    } else if (atomic instanceof DoubleType) {
      return Types.DoubleType.get();

    } else if (
        atomic instanceof StringType ||
        atomic instanceof CharType ||
        atomic instanceof VarcharType) {
      // char/varchar lose their length bound; Iceberg only has string
      return Types.StringType.get();

    } else if (atomic instanceof DateType) {
      return Types.DateType.get();

    } else if (atomic instanceof TimestampType) {
      // Spark timestamps are UTC-adjusted
      return Types.TimestampType.withZone();

    } else if (atomic instanceof DecimalType) {
      return Types.DecimalType.of(
          ((DecimalType) atomic).precision(),
          ((DecimalType) atomic).scale());

    } else if (atomic instanceof BinaryType) {
      return Types.BinaryType.get();
    }

    throw new UnsupportedOperationException(
        "Not a supported type: " + atomic.catalogString());
  }
}
| 6,606 |
0 | Create_ds/iceberg/spark/src/main/java/com/netflix/iceberg/spark | Create_ds/iceberg/spark/src/main/java/com/netflix/iceberg/spark/source/Stats.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.iceberg.spark.source;
import org.apache.spark.sql.sources.v2.reader.Statistics;
import java.util.OptionalLong;
/**
 * Simple {@link Statistics} implementation backed by values known at construction time.
 * Both size and row count are always present.
 */
class Stats implements Statistics {
  private final long totalSizeInBytes;
  private final long totalRecords;

  Stats(long sizeInBytes, long numRows) {
    this.totalSizeInBytes = sizeInBytes;
    this.totalRecords = numRows;
  }

  @Override
  public OptionalLong sizeInBytes() {
    return OptionalLong.of(totalSizeInBytes);
  }

  @Override
  public OptionalLong numRows() {
    return OptionalLong.of(totalRecords);
  }
}
| 6,607 |
0 | Create_ds/iceberg/spark/src/main/java/com/netflix/iceberg/spark | Create_ds/iceberg/spark/src/main/java/com/netflix/iceberg/spark/source/Writer.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.iceberg.spark.source;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.netflix.iceberg.AppendFiles;
import com.netflix.iceberg.DataFile;
import com.netflix.iceberg.DataFiles;
import com.netflix.iceberg.FileFormat;
import com.netflix.iceberg.Metrics;
import com.netflix.iceberg.PartitionSpec;
import com.netflix.iceberg.Schema;
import com.netflix.iceberg.Table;
import com.netflix.iceberg.TableProperties;
import com.netflix.iceberg.avro.Avro;
import com.netflix.iceberg.exceptions.RuntimeIOException;
import com.netflix.iceberg.hadoop.HadoopInputFile;
import com.netflix.iceberg.hadoop.HadoopOutputFile;
import com.netflix.iceberg.io.FileAppender;
import com.netflix.iceberg.io.InputFile;
import com.netflix.iceberg.io.OutputFile;
import com.netflix.iceberg.orc.ORC;
import com.netflix.iceberg.parquet.Parquet;
import com.netflix.iceberg.spark.data.SparkAvroWriter;
import com.netflix.iceberg.spark.data.SparkOrcWriter;
import com.netflix.iceberg.transforms.Transform;
import com.netflix.iceberg.transforms.Transforms;
import com.netflix.iceberg.types.Types.StringType;
import com.netflix.iceberg.util.Tasks;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.spark.sql.catalyst.InternalRow;
import org.apache.spark.sql.execution.datasources.parquet.ParquetWriteSupport;
import org.apache.spark.sql.sources.v2.writer.DataSourceWriter;
import org.apache.spark.sql.sources.v2.writer.DataWriter;
import org.apache.spark.sql.sources.v2.writer.DataWriterFactory;
import org.apache.spark.sql.sources.v2.writer.SupportsWriteInternalRow;
import org.apache.spark.sql.sources.v2.writer.WriterCommitMessage;
import org.apache.spark.util.SerializableConfiguration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Closeable;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.function.Function;
import static com.google.common.collect.Iterables.concat;
import static com.google.common.collect.Iterables.transform;
import static com.netflix.iceberg.TableProperties.COMMIT_MAX_RETRY_WAIT_MS;
import static com.netflix.iceberg.TableProperties.COMMIT_MAX_RETRY_WAIT_MS_DEFAULT;
import static com.netflix.iceberg.TableProperties.COMMIT_MIN_RETRY_WAIT_MS;
import static com.netflix.iceberg.TableProperties.COMMIT_MIN_RETRY_WAIT_MS_DEFAULT;
import static com.netflix.iceberg.TableProperties.COMMIT_NUM_RETRIES;
import static com.netflix.iceberg.TableProperties.COMMIT_NUM_RETRIES_DEFAULT;
import static com.netflix.iceberg.TableProperties.COMMIT_TOTAL_RETRY_TIME_MS;
import static com.netflix.iceberg.TableProperties.COMMIT_TOTAL_RETRY_TIME_MS_DEFAULT;
import static com.netflix.iceberg.TableProperties.OBJECT_STORE_ENABLED;
import static com.netflix.iceberg.TableProperties.OBJECT_STORE_ENABLED_DEFAULT;
import static com.netflix.iceberg.TableProperties.OBJECT_STORE_PATH;
import static com.netflix.iceberg.spark.SparkSchemaUtil.convert;
// TODO: parameterize DataSourceWriter with subclass of WriterCommitMessage
/**
 * Data source writer that appends files to an Iceberg table.
 * <p>
 * Executors write data files through writers created by {@link WriterFactory}; each
 * task reports its completed {@link DataFile}s in a {@link TaskCommit} message. On the
 * driver, {@link #commit(WriterCommitMessage[])} appends all reported files to the
 * table in a single snapshot, and {@link #abort(WriterCommitMessage[])} deletes them.
 */
class Writer implements DataSourceWriter, SupportsWriteInternalRow {
  private static final Transform<String, Integer> HASH_FUNC = Transforms
      .bucket(StringType.get(), Integer.MAX_VALUE);
  private static final Logger LOG = LoggerFactory.getLogger(Writer.class);

  private final Table table;
  private final Configuration conf;
  private final FileFormat format;

  Writer(Table table, Configuration conf, FileFormat format) {
    this.table = table;
    this.conf = conf;
    this.format = format;
  }

  @Override
  public DataWriterFactory<InternalRow> createInternalRowWriterFactory() {
    return new WriterFactory(table.spec(), format, dataLocation(), table.properties(), conf);
  }

  /**
   * Appends all files reported by the task commit messages in a single table commit.
   */
  @Override
  public void commit(WriterCommitMessage[] messages) {
    AppendFiles append = table.newAppend();

    int numFiles = 0;
    for (DataFile file : files(messages)) {
      numFiles += 1;
      append.appendFile(file);
    }

    LOG.info("Appending {} files to {}", numFiles, table);
    long start = System.currentTimeMillis();
    append.commit(); // abort is automatically called if this fails
    long duration = System.currentTimeMillis() - start;
    LOG.info("Committed in {} ms", duration);
  }

  /**
   * Deletes every file reported by the task commit messages, retrying deletes with
   * exponential backoff using the table's commit-retry settings.
   */
  @Override
  public void abort(WriterCommitMessage[] messages) {
    FileSystem fs;
    try {
      fs = new Path(table.location()).getFileSystem(conf);
    } catch (IOException e) {
      throw new RuntimeIOException(e);
    }

    Tasks.foreach(files(messages))
        .retry(propertyAsInt(COMMIT_NUM_RETRIES, COMMIT_NUM_RETRIES_DEFAULT))
        .exponentialBackoff(
            propertyAsInt(COMMIT_MIN_RETRY_WAIT_MS, COMMIT_MIN_RETRY_WAIT_MS_DEFAULT),
            propertyAsInt(COMMIT_MAX_RETRY_WAIT_MS, COMMIT_MAX_RETRY_WAIT_MS_DEFAULT),
            propertyAsInt(COMMIT_TOTAL_RETRY_TIME_MS, COMMIT_TOTAL_RETRY_TIME_MS_DEFAULT),
            2.0 /* exponential */ )
        .throwFailureWhenFinished()
        .run(file -> {
          try {
            fs.delete(new Path(file.path().toString()), false /* not recursive */ );
          } catch (IOException e) {
            throw new RuntimeIOException(e);
          }
        });
  }

  /** Flattens the data files carried by the task messages; null messages contribute nothing. */
  private Iterable<DataFile> files(WriterCommitMessage[] messages) {
    if (messages.length > 0) {
      return concat(transform(Arrays.asList(messages), message -> message != null
          ? ImmutableList.copyOf(((TaskCommit) message).files())
          : ImmutableList.of()));
    }

    return ImmutableList.of();
  }

  /** Reads an integer table property, falling back to the given default when unset. */
  private int propertyAsInt(String property, int defaultValue) {
    String value = table.properties().get(property);
    if (value != null) {
      // fix: parse the value that was already fetched instead of a second map lookup
      return Integer.parseInt(value);
    }
    return defaultValue;
  }

  /** Returns the location for new data files, defaulting to "&lt;table location&gt;/data". */
  private String dataLocation() {
    return table.properties().getOrDefault(
        TableProperties.WRITE_NEW_DATA_LOCATION,
        new Path(new Path(table.location()), "data").toString());
  }

  @Override
  public String toString() {
    return String.format("IcebergWrite(table=%s, type=%s, format=%s)",
        table, table.schema().asStruct(), format);
  }

  /** Commit message carrying the data files completed by one write task. */
  private static class TaskCommit implements WriterCommitMessage {
    private final DataFile[] files;

    TaskCommit() {
      this.files = new DataFile[0];
    }

    TaskCommit(DataFile file) {
      this.files = new DataFile[] { file };
    }

    TaskCommit(List<DataFile> files) {
      this.files = files.toArray(new DataFile[files.size()]);
    }

    DataFile[] files() {
      return files;
    }
  }

  /**
   * Creates per-task writers. Chooses between a single-file unpartitioned writer and a
   * partitioned writer, and optionally redirects output under an object-store path with
   * a hashed prefix to spread write load.
   */
  private static class WriterFactory implements DataWriterFactory<InternalRow> {
    private final PartitionSpec spec;
    private final FileFormat format;
    private final String dataLocation;
    private final Map<String, String> properties;
    private final SerializableConfiguration conf;
    // shared by all files of this write so task files are easy to correlate
    private final String uuid = UUID.randomUUID().toString();

    private transient Path dataPath = null;

    WriterFactory(PartitionSpec spec, FileFormat format, String dataLocation,
                  Map<String, String> properties, Configuration conf) {
      this.spec = spec;
      this.format = format;
      this.dataLocation = dataLocation;
      this.properties = properties;
      this.conf = new SerializableConfiguration(conf);
    }

    @Override
    public DataWriter<InternalRow> createDataWriter(int partitionId, int attemptNumber) {
      String filename = format.addExtension(String.format("%05d-%d-%s",
          partitionId, attemptNumber, uuid));
      AppenderFactory<InternalRow> factory = new SparkAppenderFactory();
      if (spec.fields().isEmpty()) {
        return new UnpartitionedWriter(lazyDataPath(), filename, format, conf.value(), factory);

      } else {
        Path baseDataPath = lazyDataPath(); // avoid calling this in the output path function
        Function<PartitionKey, Path> outputPathFunc = key ->
            new Path(new Path(baseDataPath, key.toPath()), filename);

        // fix: an explicit table property must override the default. The previous
        // expression OR'd the parsed property with the default, so an explicit
        // "false" could never disable object storage when the default is true, and
        // the default was only honored by accident of parseBoolean(null) == false.
        String objectStoreEnabled = properties.get(OBJECT_STORE_ENABLED);
        boolean useObjectStorage = objectStoreEnabled != null
            ? Boolean.parseBoolean(objectStoreEnabled)
            : OBJECT_STORE_ENABLED_DEFAULT;

        if (useObjectStorage) {
          // try to get db and table portions of the path for context in the object store
          String context = pathContext(baseDataPath);
          String objectStore = properties.get(OBJECT_STORE_PATH);
          Preconditions.checkNotNull(objectStore,
              "Cannot use object storage, missing location: " + OBJECT_STORE_PATH);
          Path objectStorePath = new Path(objectStore);

          // prefix paths with a hash of partition+filename to spread load across
          // object store key ranges
          outputPathFunc = key -> {
            String partitionAndFilename = key.toPath() + "/" + filename;
            int hash = HASH_FUNC.apply(partitionAndFilename);
            return new Path(objectStorePath,
                String.format("%08x/%s/%s", hash, context, partitionAndFilename));
          };
        }

        return new PartitionedWriter(spec, format, conf.value(), factory, outputPathFunc);
      }
    }

    /** Derives "db/table"-style context from the data path, skipping a trailing "data" dir. */
    private static String pathContext(Path dataPath) {
      Path parent = dataPath.getParent();
      if (parent != null) {
        // remove the data folder
        if (dataPath.getName().equals("data")) {
          return pathContext(parent);
        }

        return parent.getName() + "/" + dataPath.getName();
      }

      return dataPath.getName();
    }

    // Path is not Serializable, so it is rebuilt lazily from the location string
    private Path lazyDataPath() {
      if (dataPath == null) {
        this.dataPath = new Path(dataLocation);
      }
      return dataPath;
    }

    /** Builds file appenders configured for the table schema and requested format. */
    private class SparkAppenderFactory implements AppenderFactory<InternalRow> {
      public FileAppender<InternalRow> newAppender(OutputFile file, FileFormat format) {
        Schema schema = spec.schema();
        try {
          switch (format) {
            case PARQUET:
              String jsonSchema = convert(schema).json();
              return Parquet.write(file)
                  .writeSupport(new ParquetWriteSupport())
                  .set("org.apache.spark.sql.parquet.row.attributes", jsonSchema)
                  .set("spark.sql.parquet.writeLegacyFormat", "false")
                  .set("spark.sql.parquet.binaryAsString", "false")
                  .set("spark.sql.parquet.int96AsTimestamp", "false")
                  .set("spark.sql.parquet.outputTimestampType", "TIMESTAMP_MICROS")
                  .setAll(properties)
                  .schema(schema)
                  .build();

            case AVRO:
              return Avro.write(file)
                  .createWriterFunc(ignored -> new SparkAvroWriter(schema))
                  .setAll(properties)
                  .schema(schema)
                  .build();

            case ORC: {
              @SuppressWarnings("unchecked")
              SparkOrcWriter writer = new SparkOrcWriter(ORC.write(file)
                  .schema(schema)
                  .build());
              return writer;
            }

            default:
              throw new UnsupportedOperationException("Cannot write unknown format: " + format);
          }
        } catch (IOException e) {
          throw new RuntimeIOException(e);
        }
      }
    }
  }

  private interface AppenderFactory<T> {
    FileAppender<T> newAppender(OutputFile file, FileFormat format);
  }

  /** Writer for unpartitioned tables: all rows of the task go to a single file. */
  private static class UnpartitionedWriter implements DataWriter<InternalRow>, Closeable {
    private final Path file;
    private final Configuration conf;
    private FileAppender<InternalRow> appender = null;
    private Metrics metrics = null;

    UnpartitionedWriter(Path dataPath, String filename, FileFormat format,
                        Configuration conf, AppenderFactory<InternalRow> factory) {
      this.file = new Path(dataPath, filename);
      this.appender = factory.newAppender(HadoopOutputFile.fromPath(file, conf), format);
      this.conf = conf;
    }

    @Override
    public void write(InternalRow record) {
      appender.add(record);
    }

    @Override
    public WriterCommitMessage commit() throws IOException {
      Preconditions.checkArgument(appender != null, "Commit called on a closed writer: %s", this);

      close();

      // drop the file instead of committing an empty one
      if (metrics.recordCount() == 0L) {
        FileSystem fs = file.getFileSystem(conf);
        fs.delete(file, false);
        return new TaskCommit();
      }

      InputFile inFile = HadoopInputFile.fromPath(file, conf);
      DataFile dataFile = DataFiles.fromInputFile(inFile, null, metrics);

      return new TaskCommit(dataFile);
    }

    @Override
    public void abort() throws IOException {
      Preconditions.checkArgument(appender != null, "Abort called on a closed writer: %s", this);

      close();
      FileSystem fs = file.getFileSystem(conf);
      fs.delete(file, false);
    }

    @Override
    public void close() throws IOException {
      if (this.appender != null) {
        this.appender.close();
        // metrics are only valid after the appender is closed
        this.metrics = appender.metrics();
        this.appender = null;
      }
    }
  }

  /**
   * Writer for partitioned tables. Requires incoming rows to be clustered by partition:
   * once a partition's file is closed, seeing that partition again fails the write.
   */
  private static class PartitionedWriter implements DataWriter<InternalRow> {
    private final Set<PartitionKey> completedPartitions = Sets.newHashSet();
    private final List<DataFile> completedFiles = Lists.newArrayList();
    private final PartitionSpec spec;
    private final FileFormat format;
    private final Configuration conf;
    private final AppenderFactory<InternalRow> factory;
    private final Function<PartitionKey, Path> outputPathFunc;
    // reusable key, overwritten for each row; copied when kept as currentKey
    private final PartitionKey key;

    private PartitionKey currentKey = null;
    private FileAppender<InternalRow> currentAppender = null;
    private Path currentPath = null;

    PartitionedWriter(PartitionSpec spec, FileFormat format, Configuration conf,
                      AppenderFactory<InternalRow> factory,
                      Function<PartitionKey, Path> outputPathFunc) {
      this.spec = spec;
      this.format = format;
      this.conf = conf;
      this.factory = factory;
      this.outputPathFunc = outputPathFunc;
      this.key = new PartitionKey(spec);
    }

    @Override
    public void write(InternalRow row) throws IOException {
      key.partition(row);

      if (!key.equals(currentKey)) {
        closeCurrent();

        if (completedPartitions.contains(key)) {
          // if rows are not correctly grouped, detect and fail the write
          PartitionKey existingKey = Iterables.find(completedPartitions, key::equals, null);
          LOG.warn("Duplicate key: {} == {}", existingKey, key);
          throw new IllegalStateException("Already closed file for partition: " + key.toPath());
        }

        this.currentKey = key.copy();
        this.currentPath = outputPathFunc.apply(currentKey);
        OutputFile file = HadoopOutputFile.fromPath(currentPath, conf);
        this.currentAppender = factory.newAppender(file, format);
      }

      currentAppender.add(row);
    }

    @Override
    public WriterCommitMessage commit() throws IOException {
      closeCurrent();
      return new TaskCommit(completedFiles);
    }

    @Override
    public void abort() throws IOException {
      // fix: if no rows were ever written, currentPath is null and there is nothing
      // to clean up; the previous version threw NullPointerException here
      if (currentPath == null) {
        return;
      }

      FileSystem fs = currentPath.getFileSystem(conf);

      // clean up files created by this writer
      Tasks.foreach(completedFiles)
          .throwFailureWhenFinished()
          .noRetry()
          .run(file -> fs.delete(new Path(file.path().toString()), false /* not recursive */ ),
              IOException.class);

      if (currentAppender != null) {
        currentAppender.close();
        this.currentAppender = null;
        // fix: use the non-deprecated delete(Path, boolean); this is a file, so a
        // non-recursive delete has the same effect
        fs.delete(currentPath, false);
      }
    }

    /** Closes the open appender, if any, and records its data file and metrics. */
    private void closeCurrent() throws IOException {
      if (currentAppender != null) {
        currentAppender.close();
        // metrics are only valid after the appender is closed
        Metrics metrics = currentAppender.metrics();
        this.currentAppender = null;

        InputFile inFile = HadoopInputFile.fromPath(currentPath, conf);
        DataFile dataFile = DataFiles.builder(spec)
            .withInputFile(inFile)
            .withPartition(currentKey)
            .withMetrics(metrics)
            .build();
        completedPartitions.add(currentKey);
        completedFiles.add(dataFile);
      }
    }
  }
}
| 6,608 |
0 | Create_ds/iceberg/spark/src/main/java/com/netflix/iceberg/spark | Create_ds/iceberg/spark/src/main/java/com/netflix/iceberg/spark/source/PartitionKey.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.iceberg.spark.source;
import com.google.common.collect.Maps;
import com.netflix.iceberg.PartitionField;
import com.netflix.iceberg.PartitionSpec;
import com.netflix.iceberg.Schema;
import com.netflix.iceberg.StructLike;
import com.netflix.iceberg.transforms.Transform;
import com.netflix.iceberg.types.Type;
import com.netflix.iceberg.types.TypeUtil;
import com.netflix.iceberg.types.Types;
import org.apache.spark.sql.catalyst.InternalRow;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.Decimal;
import org.apache.spark.unsafe.types.UTF8String;
import java.lang.reflect.Array;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import static com.netflix.iceberg.spark.SparkSchemaUtil.convert;
/**
 * Partition key for writes: extracts a row's partition tuple by applying each partition
 * field's transform to the corresponding source column of a Spark {@link InternalRow},
 * and exposes the tuple through Iceberg's {@link StructLike}.
 * <p>
 * Instances are mutable: {@link #partition(InternalRow)} overwrites the tuple in place,
 * so callers that need a stable key must use {@link #copy()}.
 */
class PartitionKey implements StructLike {
  private final PartitionSpec spec;
  private final int size;
  // partition values for the current row, one slot per partition field
  private final Object[] partitionTuple;
  // transform to apply to each source value (raw type: element types vary per field)
  private final Transform[] transforms;
  // extracts each partition field's source column from an InternalRow
  private final Accessor<InternalRow>[] accessors;

  @SuppressWarnings("unchecked")
  PartitionKey(PartitionSpec spec) {
    this.spec = spec;
    List<PartitionField> fields = spec.fields();
    this.size = fields.size();
    this.partitionTuple = new Object[size];
    this.transforms = new Transform[size];
    this.accessors = (Accessor<InternalRow>[]) Array.newInstance(Accessor.class, size);
    Schema schema = spec.schema();
    // maps source field id -> accessor; NOTE(review): this local shadows the
    // accessors field above — intentional but easy to misread
    Map<Integer, Accessor<InternalRow>> accessors = buildAccessors(schema);
    for (int i = 0; i < size; i += 1) {
      PartitionField field = fields.get(i);
      Accessor<InternalRow> accessor = accessors.get(field.sourceId());
      if (accessor == null) {
        throw new RuntimeException(
            "Cannot build accessor for field: " + schema.findField(field.sourceId()));
      }
      this.accessors[i] = accessor;
      this.transforms[i] = field.transform();
    }
  }

  // copy constructor used by copy(): shares transforms/accessors, deep-copies the tuple
  private PartitionKey(PartitionKey toCopy) {
    this.spec = toCopy.spec;
    this.size = toCopy.size;
    this.partitionTuple = new Object[toCopy.partitionTuple.length];
    this.transforms = toCopy.transforms;
    this.accessors = toCopy.accessors;
    for (int i = 0; i < partitionTuple.length; i += 1) {
      this.partitionTuple[i] = defensiveCopyIfNeeded(toCopy.partitionTuple[i]);
    }
  }

  // UTF8String values may be views over buffers Spark reuses, so copy their bytes
  private Object defensiveCopyIfNeeded(Object obj) {
    if (obj instanceof UTF8String) {
      // bytes backing the UTF8 string might be reused
      byte[] bytes = ((UTF8String) obj).getBytes();
      return UTF8String.fromBytes(Arrays.copyOf(bytes, bytes.length));
    }
    return obj;
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("[");
    for (int i = 0; i < partitionTuple.length; i += 1) {
      if (i > 0) {
        sb.append(", ");
      }
      sb.append(partitionTuple[i]);
    }
    sb.append("]");
    return sb.toString();
  }

  /** Returns a copy whose values will not change when partition(row) is next called. */
  PartitionKey copy() {
    return new PartitionKey(this);
  }

  /** Returns the partition path for the current tuple, as defined by the spec. */
  String toPath() {
    return spec.partitionToPath(this);
  }

  /** Fills the partition tuple from the given row, overwriting any previous values. */
  @SuppressWarnings("unchecked")
  void partition(InternalRow row) {
    for (int i = 0; i < partitionTuple.length; i += 1) {
      Transform<Object, Object> transform = transforms[i];
      partitionTuple[i] = transform.apply(accessors[i].get(row));
    }
  }

  @Override
  public int size() {
    return size;
  }

  @Override
  @SuppressWarnings("unchecked")
  public <T> T get(int pos, Class<T> javaClass) {
    return javaClass.cast(partitionTuple[pos]);
  }

  @Override
  public <T> void set(int pos, T value) {
    partitionTuple[pos] = value;
  }

  // equality and hashing use only the partition values; the spec is not compared
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    PartitionKey that = (PartitionKey) o;
    return Arrays.equals(partitionTuple, that.partitionTuple);
  }

  @Override
  public int hashCode() {
    return Arrays.hashCode(partitionTuple);
  }

  // reads one value out of a container, possibly through nested structs
  private interface Accessor<T> {
    Object get(T container);
  }

  private static Map<Integer, Accessor<InternalRow>> buildAccessors(Schema schema) {
    return TypeUtil.visit(schema, new BuildPositionAccessors());
  }

  // leaf accessor: strings and decimals need conversion from Spark's internal classes
  private static Accessor<InternalRow> newAccessor(int p, Type type) {
    switch (type.typeId()) {
      case STRING:
        return new StringAccessor(p, convert(type));
      case DECIMAL:
        return new DecimalAccessor(p, convert(type));
      default:
        return new PositionAccessor(p, convert(type));
    }
  }

  // wraps an accessor for a field nested inside a struct at position p. Up to three
  // levels of required (non-optional) structs are flattened into direct position
  // chains; optional structs get a null-checking wrapper instead.
  private static Accessor<InternalRow> newAccessor(int p, boolean isOptional, Types.StructType type,
                                                   Accessor<InternalRow> accessor) {
    int size = type.fields().size();
    if (isOptional) {
      // the wrapped position handles null layers
      return new WrappedPositionAccessor(p, size, accessor);
    } else if (accessor instanceof PositionAccessor) {
      return new Position2Accessor(p, size, (PositionAccessor) accessor);
    } else if (accessor instanceof Position2Accessor) {
      return new Position3Accessor(p, size, (Position2Accessor) accessor);
    } else {
      return new WrappedPositionAccessor(p, size, accessor);
    }
  }

  // schema visitor producing a map from source field id to accessor for struct fields
  private static class BuildPositionAccessors
      extends TypeUtil.SchemaVisitor<Map<Integer, Accessor<InternalRow>>> {
    @Override
    public Map<Integer, Accessor<InternalRow>> schema(
        Schema schema, Map<Integer, Accessor<InternalRow>> structResult) {
      return structResult;
    }

    @Override
    public Map<Integer, Accessor<InternalRow>> struct(
        Types.StructType struct, List<Map<Integer, Accessor<InternalRow>>> fieldResults) {
      Map<Integer, Accessor<InternalRow>> accessors = Maps.newHashMap();
      List<Types.NestedField> fields = struct.fields();
      for (int i = 0; i < fieldResults.size(); i += 1) {
        Types.NestedField field = fields.get(i);
        Map<Integer, Accessor<InternalRow>> result = fieldResults.get(i);
        if (result != null) {
          // nested struct: rewrap its accessors to descend through this position
          for (Map.Entry<Integer, Accessor<InternalRow>> entry : result.entrySet()) {
            accessors.put(entry.getKey(), newAccessor(i, field.isOptional(),
                field.type().asNestedType().asStructType(), entry.getValue()));
          }
        } else {
          // no nested accessors were produced: create a direct position accessor
          accessors.put(field.fieldId(), newAccessor(i, field.type()));
        }
      }
      if (accessors.isEmpty()) {
        return null;
      }
      return accessors;
    }

    @Override
    public Map<Integer, Accessor<InternalRow>> field(
        Types.NestedField field, Map<Integer, Accessor<InternalRow>> fieldResult) {
      return fieldResult;
    }
  }

  // reads the value at a top-level position, returning null for null fields
  private static class PositionAccessor implements Accessor<InternalRow> {
    protected final DataType type;
    protected int p;

    private PositionAccessor(int p, DataType type) {
      this.p = p;
      this.type = type;
    }

    @Override
    public Object get(InternalRow row) {
      if (row.isNullAt(p)) {
        return null;
      }
      return row.get(p, type);
    }
  }

  // like PositionAccessor, but converts Spark's internal string to a Java String
  private static class StringAccessor extends PositionAccessor {
    private StringAccessor(int p, DataType type) {
      super(p, type);
    }

    @Override
    public Object get(InternalRow row) {
      if (row.isNullAt(p)) {
        return null;
      }
      return row.get(p, type).toString();
    }
  }

  // like PositionAccessor, but converts Spark's Decimal to java.math.BigDecimal
  private static class DecimalAccessor extends PositionAccessor {
    private DecimalAccessor(int p, DataType type) {
      super(p, type);
    }

    @Override
    public Object get(InternalRow row) {
      if (row.isNullAt(p)) {
        return null;
      }
      return ((Decimal) row.get(p, type)).toJavaBigDecimal();
    }
  }

  // direct accessor for a field two struct levels deep; no null checks, so it is
  // only built when the outer struct is required (see newAccessor above)
  private static class Position2Accessor implements Accessor<InternalRow> {
    private final int p0;
    private final int size0;
    private final int p1;
    private final DataType type;

    private Position2Accessor(int p, int size, PositionAccessor wrapped) {
      this.p0 = p;
      this.size0 = size;
      this.p1 = wrapped.p;
      this.type = wrapped.type;
    }

    @Override
    public Object get(InternalRow row) {
      return row.getStruct(p0, size0).get(p1, type);
    }
  }

  // direct accessor for a field three struct levels deep; no null checks, same
  // restriction as Position2Accessor
  private static class Position3Accessor implements Accessor<InternalRow> {
    private final int p0;
    private final int size0;
    private final int p1;
    private final int size1;
    private final int p2;
    private final DataType type;

    private Position3Accessor(int p, int size, Position2Accessor wrapped) {
      this.p0 = p;
      this.size0 = size;
      this.p1 = wrapped.p0;
      this.size1 = wrapped.size0;
      this.p2 = wrapped.p1;
      this.type = wrapped.type;
    }

    @Override
    public Object get(InternalRow row) {
      return row.getStruct(p0, size0).getStruct(p1, size1).get(p2, type);
    }
  }

  // descends one struct level, returning null if the struct itself is null
  private static class WrappedPositionAccessor implements Accessor<InternalRow> {
    private final int p;
    private final int size;
    private final Accessor<InternalRow> accessor;

    private WrappedPositionAccessor(int p, int size, Accessor<InternalRow> accessor) {
      this.p = p;
      this.size = size;
      this.accessor = accessor;
    }

    @Override
    public Object get(InternalRow row) {
      InternalRow inner = row.getStruct(p, size);
      if (inner != null) {
        return accessor.get(inner);
      }
      return null;
    }
  }
}
| 6,609 |
0 | Create_ds/iceberg/spark/src/main/java/com/netflix/iceberg/spark | Create_ds/iceberg/spark/src/main/java/com/netflix/iceberg/spark/source/Reader.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.iceberg.spark.source;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.netflix.iceberg.CombinedScanTask;
import com.netflix.iceberg.DataFile;
import com.netflix.iceberg.FileScanTask;
import com.netflix.iceberg.PartitionField;
import com.netflix.iceberg.PartitionSpec;
import com.netflix.iceberg.Schema;
import com.netflix.iceberg.SchemaParser;
import com.netflix.iceberg.StructLike;
import com.netflix.iceberg.Table;
import com.netflix.iceberg.TableScan;
import com.netflix.iceberg.avro.Avro;
import com.netflix.iceberg.common.DynMethods;
import com.netflix.iceberg.exceptions.RuntimeIOException;
import com.netflix.iceberg.expressions.Expression;
import com.netflix.iceberg.hadoop.HadoopInputFile;
import com.netflix.iceberg.io.CloseableIterable;
import com.netflix.iceberg.io.InputFile;
import com.netflix.iceberg.parquet.Parquet;
import com.netflix.iceberg.spark.SparkExpressions;
import com.netflix.iceberg.spark.SparkSchemaUtil;
import com.netflix.iceberg.spark.data.SparkAvroReader;
import com.netflix.iceberg.spark.data.SparkOrcReader;
import com.netflix.iceberg.spark.data.SparkParquetReaders;
import com.netflix.iceberg.types.TypeUtil;
import com.netflix.iceberg.types.Types;
import org.apache.hadoop.conf.Configuration;
import org.apache.spark.sql.catalyst.InternalRow;
import org.apache.spark.sql.catalyst.expressions.Attribute;
import org.apache.spark.sql.catalyst.expressions.AttributeReference;
import org.apache.spark.sql.catalyst.expressions.GenericInternalRow;
import org.apache.spark.sql.catalyst.expressions.JoinedRow;
import org.apache.spark.sql.catalyst.expressions.UnsafeProjection;
import org.apache.spark.sql.catalyst.expressions.UnsafeRow;
import org.apache.spark.sql.sources.v2.reader.DataReader;
import org.apache.spark.sql.sources.v2.reader.DataSourceReader;
import org.apache.spark.sql.sources.v2.reader.DataReaderFactory;
import org.apache.spark.sql.sources.v2.reader.Statistics;
import org.apache.spark.sql.sources.v2.reader.SupportsPushDownCatalystFilters;
import org.apache.spark.sql.sources.v2.reader.SupportsPushDownRequiredColumns;
import org.apache.spark.sql.sources.v2.reader.SupportsReportStatistics;
import org.apache.spark.sql.sources.v2.reader.SupportsScanUnsafeRow;
import org.apache.spark.sql.types.BinaryType;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.Decimal;
import org.apache.spark.sql.types.DecimalType;
import org.apache.spark.sql.types.StringType;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import org.apache.spark.unsafe.types.UTF8String;
import org.apache.spark.util.SerializableConfiguration;
import java.io.Closeable;
import java.io.IOException;
import java.io.Serializable;
import java.nio.ByteBuffer;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.function.Function;
import static com.google.common.collect.Iterators.transform;
import static com.netflix.iceberg.spark.SparkSchemaUtil.convert;
import static com.netflix.iceberg.spark.SparkSchemaUtil.prune;
import static scala.collection.JavaConverters.asScalaBufferConverter;
import static scala.collection.JavaConverters.seqAsJavaListConverter;
/**
 * Iceberg implementation of Spark's DataSourceV2 reader.
 *
 * <p>Supports column pruning, catalyst filter push-down (converted to Iceberg
 * expressions), unsafe-row scans, and size/row-count statistics reporting.
 * Filter and projection state is mutable; derived values (schema, Spark type,
 * planned tasks) are cached lazily and invalidated when that state changes.
 */
class Reader implements DataSourceReader, SupportsScanUnsafeRow, SupportsPushDownCatalystFilters,
    SupportsPushDownRequiredColumns, SupportsReportStatistics {
  // shared empty array returned before any filters have been pushed down
  private static final org.apache.spark.sql.catalyst.expressions.Expression[] NO_EXPRS =
      new org.apache.spark.sql.catalyst.expressions.Expression[0];
  private final Table table;
  // Hadoop conf wrapped so it can be shipped to executors with each read task
  private final SerializableConfiguration conf;
  // columns Spark asked for via pruneColumns(); null until pruning happens
  private StructType requestedSchema = null;
  // Iceberg expressions converted from the pushed catalyst filters
  private List<Expression> filterExpressions = null;
  private org.apache.spark.sql.catalyst.expressions.Expression[] pushedExprs = NO_EXPRS;
  // lazy variables
  private Schema schema = null;
  private StructType type = null; // cached because Spark accesses it multiple times
  private List<CombinedScanTask> tasks = null; // lazy cache of tasks

  Reader(Table table, Configuration conf) {
    this.table = table;
    this.conf = new SerializableConfiguration(conf);
    // eagerly seeded with the full table schema; reset to null and re-derived
    // whenever filters are pushed or columns are pruned
    this.schema = table.schema();
  }

  /**
   * Returns the schema to read: the table schema pruned to the requested
   * columns, or the full table schema when no pruning has been requested.
   */
  private Schema lazySchema() {
    if (schema == null) {
      if (requestedSchema != null) {
        this.schema = prune(table.schema(), requestedSchema);
      } else {
        this.schema = table.schema();
      }
    }
    return schema;
  }

  /** Spark StructType for lazySchema(), cached to avoid repeated conversion. */
  private StructType lazyType() {
    if (type == null) {
      this.type = convert(lazySchema());
    }
    return type;
  }

  @Override
  public StructType readSchema() {
    return lazyType();
  }
@Override
public List<DataReaderFactory<UnsafeRow>> createUnsafeRowReaderFactories() {
String tableSchemaString = SchemaParser.toJson(table.schema());
String expectedSchemaString = SchemaParser.toJson(lazySchema());
List<DataReaderFactory<UnsafeRow>> readTasks = Lists.newArrayList();
for (CombinedScanTask task : tasks()) {
readTasks.add(new ReadTask(task, tableSchemaString, expectedSchemaString, conf));
}
return readTasks;
}
@Override
public org.apache.spark.sql.catalyst.expressions.Expression[] pushCatalystFilters(
org.apache.spark.sql.catalyst.expressions.Expression[] filters) {
this.tasks = null; // invalidate cached tasks, if present
List<Expression> expressions = Lists.newArrayListWithExpectedSize(filters.length);
List<org.apache.spark.sql.catalyst.expressions.Expression> pushed =
Lists.newArrayListWithExpectedSize(filters.length);
for (org.apache.spark.sql.catalyst.expressions.Expression filter : filters) {
Expression expr = SparkExpressions.convert(filter);
if (expr != null) {
expressions.add(expr);
pushed.add(filter);
}
}
this.filterExpressions = expressions;
this.pushedExprs = pushed.toArray(new org.apache.spark.sql.catalyst.expressions.Expression[0]);
// invalidate the schema that will be projected
this.schema = null;
this.type = null;
// Spark doesn't support residuals per task, so return all filters
// to get Spark to handle record-level filtering
return filters;
}
@Override
public org.apache.spark.sql.catalyst.expressions.Expression[] pushedCatalystFilters() {
return pushedExprs;
}
@Override
public void pruneColumns(StructType requestedSchema) {
this.requestedSchema = requestedSchema;
// invalidate the schema that will be projected
this.schema = null;
this.type = null;
}
@Override
public Statistics getStatistics() {
long sizeInBytes = 0L;
long numRows = 0L;
for (CombinedScanTask task : tasks()) {
for (FileScanTask file : task.files()) {
sizeInBytes += file.length();
numRows += file.file().recordCount();
}
}
return new Stats(sizeInBytes, numRows);
}
private List<CombinedScanTask> tasks() {
if (tasks == null) {
TableScan scan = table.newScan().project(lazySchema());
if (filterExpressions != null) {
for (Expression filter : filterExpressions) {
scan = scan.filter(filter);
}
}
try (CloseableIterable<CombinedScanTask> tasksIterable = scan.planTasks()) {
this.tasks = Lists.newArrayList(tasksIterable);
} catch (IOException e) {
throw new RuntimeIOException(e, "Failed to close table scan: %s", scan);
}
}
return tasks;
}
@Override
public String toString() {
return String.format(
"IcebergScan(table=%s, type=%s, filters=%s)",
table, lazySchema().asStruct(), filterExpressions);
}
private static class ReadTask implements DataReaderFactory<UnsafeRow>, Serializable {
private final CombinedScanTask task;
private final String tableSchemaString;
private final String expectedSchemaString;
private final SerializableConfiguration conf;
private transient Schema tableSchema = null;
private transient Schema expectedSchema = null;
private ReadTask(CombinedScanTask task, String tableSchemaString, String expectedSchemaString,
SerializableConfiguration conf) {
this.task = task;
this.tableSchemaString = tableSchemaString;
this.expectedSchemaString = expectedSchemaString;
this.conf = conf;
}
@Override
public DataReader<UnsafeRow> createDataReader() {
return new TaskDataReader(task, lazyTableSchema(), lazyExpectedSchema(), conf.value());
}
private Schema lazyTableSchema() {
if (tableSchema == null) {
this.tableSchema = SchemaParser.fromJson(tableSchemaString);
}
return tableSchema;
}
private Schema lazyExpectedSchema() {
if (expectedSchema == null) {
this.expectedSchema = SchemaParser.fromJson(expectedSchemaString);
}
return expectedSchema;
}
}
  /**
   * Reads the files of one CombinedScanTask as UnsafeRows.
   *
   * <p>Files are opened one at a time; {@code currentCloseable} always refers
   * to the resource backing {@code currentIterator} and is closed before the
   * next file is opened. Identity-partition columns that are not stored in
   * data files are joined onto each row from the task's partition tuple.
   */
  private static class TaskDataReader implements DataReader<UnsafeRow> {
    // for some reason, the apply method can't be called from Java without reflection
    private static final DynMethods.UnboundMethod APPLY_PROJECTION = DynMethods.builder("apply")
        .impl(UnsafeProjection.class, InternalRow.class)
        .build();
    private final Iterator<FileScanTask> tasks;
    private final Schema tableSchema;
    private final Schema expectedSchema;
    private final Configuration conf;
    private Iterator<UnsafeRow> currentIterator = null;
    private Closeable currentCloseable = null;
    private UnsafeRow current = null;

    // NOTE(review): assumes every CombinedScanTask contains at least one file;
    // tasks.next() below would throw NoSuchElementException otherwise — confirm
    // that the planner never emits empty combined tasks.
    public TaskDataReader(CombinedScanTask task, Schema tableSchema, Schema expectedSchema, Configuration conf) {
      this.tasks = task.files().iterator();
      this.tableSchema = tableSchema;
      this.expectedSchema = expectedSchema;
      this.conf = conf;
      // open last because the schemas and conf must be set
      this.currentIterator = open(tasks.next());
    }

    /** Advances to the next row, opening subsequent files as needed. */
    @Override
    public boolean next() throws IOException {
      while (true) {
        if (currentIterator.hasNext()) {
          this.current = currentIterator.next();
          return true;
        } else if (tasks.hasNext()) {
          // finished one file: release it before opening the next
          this.currentCloseable.close();
          this.currentIterator = open(tasks.next());
        } else {
          return false;
        }
      }
    }

    @Override
    public UnsafeRow get() {
      return current;
    }

    @Override
    public void close() throws IOException {
      // close the current iterator
      this.currentCloseable.close();
      // exhaust the task iterator
      while (tasks.hasNext()) {
        tasks.next();
      }
    }

    /**
     * Opens one file scan task, joining identity-partition columns and/or
     * projecting away filter-only columns so the emitted rows match
     * expectedSchema exactly.
     */
    private Iterator<UnsafeRow> open(FileScanTask task) {
      DataFile file = task.file();
      // schema or rows returned by readers
      Schema finalSchema = expectedSchema;
      PartitionSpec spec = task.spec();
      Set<Integer> idColumns = identitySourceIds(spec);
      // schema needed for the projection and filtering
      Schema requiredSchema = prune(tableSchema, convert(finalSchema), task.residual());
      boolean hasJoinedPartitionColumns = !idColumns.isEmpty();
      boolean hasExtraFilterColumns = requiredSchema.columns().size() != finalSchema.columns().size();
      Schema iterSchema;
      Iterator<InternalRow> iter;
      if (hasJoinedPartitionColumns) {
        // identity partition values are not stored in the data file; read the
        // remaining columns and join the constant partition tuple to each row
        Schema readSchema = TypeUtil.selectNot(requiredSchema, idColumns);
        Schema partitionSchema = TypeUtil.select(requiredSchema, idColumns);
        PartitionRowConverter convertToRow = new PartitionRowConverter(partitionSchema, spec);
        JoinedRow joined = new JoinedRow();
        InternalRow partition = convertToRow.apply(file.partition());
        joined.withRight(partition);
        // create joined rows and project from the joined schema to the final schema
        iterSchema = TypeUtil.join(readSchema, partitionSchema);
        iter = transform(open(task, readSchema, conf), joined::withLeft);
      } else if (hasExtraFilterColumns) {
        // add projection to the final schema
        iterSchema = requiredSchema;
        iter = open(task, requiredSchema, conf);
      } else {
        // return the base iterator
        iterSchema = finalSchema;
        iter = open(task, finalSchema, conf);
      }
      // the final projection also converts each row to UnsafeRow
      return transform(iter,
          APPLY_PROJECTION.bind(projection(finalSchema, iterSchema))::invoke);
    }

    /** Builds an UnsafeProjection mapping readSchema columns (by name) to finalSchema order. */
    private static UnsafeProjection projection(Schema finalSchema, Schema readSchema) {
      StructType struct = convert(readSchema);
      List<AttributeReference> refs = seqAsJavaListConverter(struct.toAttributes()).asJava();
      List<Attribute> attrs = Lists.newArrayListWithExpectedSize(struct.fields().length);
      List<org.apache.spark.sql.catalyst.expressions.Expression> exprs =
          Lists.newArrayListWithExpectedSize(struct.fields().length);
      for (AttributeReference ref : refs) {
        attrs.add(ref.toAttribute());
      }
      for (Types.NestedField field : finalSchema.columns()) {
        int indexInReadSchema = struct.fieldIndex(field.name());
        exprs.add(refs.get(indexInReadSchema));
      }
      return UnsafeProjection.create(
          asScalaBufferConverter(exprs).asScala().toSeq(),
          asScalaBufferConverter(attrs).asScala().toSeq());
    }

    /** Returns the source field ids of all identity-transform partition fields. */
    private static Set<Integer> identitySourceIds(PartitionSpec spec) {
      Set<Integer> sourceIds = Sets.newHashSet();
      List<PartitionField> fields = spec.fields();
      for (int i = 0; i < fields.size(); i += 1) {
        PartitionField field = fields.get(i);
        if ("identity".equals(field.transform().toString())) {
          sourceIds.add(field.sourceId());
        }
      }
      return sourceIds;
    }

    /**
     * Opens the task's file with a format-specific reader, recording the
     * resource to close in {@code currentCloseable} as a side effect.
     */
    private Iterator<InternalRow> open(FileScanTask task, Schema readSchema,
                                       Configuration conf) {
      InputFile location = HadoopInputFile.fromLocation(task.file().path(), conf);
      CloseableIterable<InternalRow> iter;
      switch (task.file().format()) {
        case ORC:
          // NOTE(review): SparkOrcReader appears to act as both the iterator and
          // the closeable resource here — confirm it implements both interfaces
          SparkOrcReader reader = new SparkOrcReader(location, task, readSchema);
          this.currentCloseable = reader;
          return reader;
        case PARQUET:
          iter = newParquetIterable(location, task, readSchema);
          break;
        case AVRO:
          iter = newAvroIterable(location, task, readSchema);
          break;
        default:
          throw new UnsupportedOperationException(
              "Cannot read unknown format: " + task.file().format());
      }
      this.currentCloseable = iter;
      return iter.iterator();
    }

    /** Builds an Avro reader for the task's split, reusing row containers. */
    private CloseableIterable<InternalRow> newAvroIterable(InputFile location,
                                                           FileScanTask task,
                                                           Schema readSchema) {
      return Avro.read(location)
          .reuseContainers()
          .project(readSchema)
          .split(task.start(), task.length())
          .createReaderFunc(SparkAvroReader::new)
          .build();
    }

    /** Builds a Parquet reader for the task's split with residual filtering applied. */
    private CloseableIterable<InternalRow> newParquetIterable(InputFile location,
                                                              FileScanTask task,
                                                              Schema readSchema) {
      return Parquet.read(location)
          .project(readSchema)
          .split(task.start(), task.length())
          .createReaderFunc(fileSchema -> SparkParquetReaders.buildReader(readSchema, fileSchema))
          .filter(task.residual())
          .build();
    }
  }
private static class PartitionRowConverter implements Function<StructLike, InternalRow> {
private final DataType[] types;
private final int[] positions;
private final Class<?>[] javaTypes;
private final GenericInternalRow reusedRow;
PartitionRowConverter(Schema partitionSchema, PartitionSpec spec) {
StructType partitionType = SparkSchemaUtil.convert(partitionSchema);
StructField[] fields = partitionType.fields();
this.types = new DataType[fields.length];
this.positions = new int[types.length];
this.javaTypes = new Class<?>[types.length];
this.reusedRow = new GenericInternalRow(types.length);
List<PartitionField> partitionFields = spec.fields();
for (int rowIndex = 0; rowIndex < fields.length; rowIndex += 1) {
this.types[rowIndex] = fields[rowIndex].dataType();
int sourceId = partitionSchema.columns().get(rowIndex).fieldId();
for (int specIndex = 0; specIndex < partitionFields.size(); specIndex += 1) {
PartitionField field = spec.fields().get(specIndex);
if (field.sourceId() == sourceId && "identity".equals(field.transform().toString())) {
positions[rowIndex] = specIndex;
javaTypes[rowIndex] = spec.javaClasses()[specIndex];
break;
}
}
}
}
@Override
public InternalRow apply(StructLike tuple) {
for (int i = 0; i < types.length; i += 1) {
reusedRow.update(i, convert(tuple.get(positions[i], javaTypes[i]), types[i]));
}
return reusedRow;
}
/**
* Converts the objects into instances used by Spark's InternalRow.
*
* @param value a data value
* @param type the Spark data type
* @return the value converted to the representation expected by Spark's InternalRow.
*/
private static Object convert(Object value, DataType type) {
if (type instanceof StringType) {
return UTF8String.fromString(value.toString());
} else if (type instanceof BinaryType) {
ByteBuffer buffer = (ByteBuffer) value;
return buffer.get(new byte[buffer.remaining()]);
} else if (type instanceof DecimalType) {
return Decimal.fromDecimal(value);
}
return value;
}
}
private static class StructLikeInternalRow implements StructLike {
private final DataType[] types;
private InternalRow row = null;
StructLikeInternalRow(StructType struct) {
this.types = new DataType[struct.size()];
StructField[] fields = struct.fields();
for (int i = 0; i < fields.length; i += 1) {
types[i] = fields[i].dataType();
}
}
public StructLikeInternalRow setRow(InternalRow row) {
this.row = row;
return this;
}
@Override
public int size() {
return types.length;
}
@Override
@SuppressWarnings("unchecked")
public <T> T get(int pos, Class<T> javaClass) {
return javaClass.cast(row.get(pos, types[pos]));
}
@Override
public <T> void set(int pos, T value) {
throw new UnsupportedOperationException("Not implemented: set");
}
}
}
/*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.iceberg.spark.source;
import com.google.common.base.Preconditions;
import com.netflix.iceberg.FileFormat;
import com.netflix.iceberg.Schema;
import com.netflix.iceberg.Table;
import com.netflix.iceberg.hadoop.HadoopTables;
import com.netflix.iceberg.spark.SparkSchemaUtil;
import com.netflix.iceberg.types.CheckCompatibility;
import org.apache.hadoop.conf.Configuration;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.sources.DataSourceRegister;
import org.apache.spark.sql.sources.v2.DataSourceV2;
import org.apache.spark.sql.sources.v2.DataSourceOptions;
import org.apache.spark.sql.sources.v2.ReadSupport;
import org.apache.spark.sql.sources.v2.WriteSupport;
import org.apache.spark.sql.sources.v2.reader.DataSourceReader;
import org.apache.spark.sql.sources.v2.writer.DataSourceWriter;
import org.apache.spark.sql.types.StructType;
import java.util.List;
import java.util.Locale;
import java.util.Optional;
import static com.netflix.iceberg.TableProperties.DEFAULT_FILE_FORMAT;
import static com.netflix.iceberg.TableProperties.DEFAULT_FILE_FORMAT_DEFAULT;
/**
 * DataSourceV2 entry point for Iceberg tables, registered as "iceberg".
 * Loads tables from a filesystem path and supports reads and append writes.
 */
public class IcebergSource implements DataSourceV2, ReadSupport, WriteSupport, DataSourceRegister {
  private SparkSession lazySpark = null;
  private Configuration lazyConf = null;

  @Override
  public String shortName() {
    return "iceberg";
  }

  @Override
  public DataSourceReader createReader(DataSourceOptions options) {
    return new Reader(findTable(options), lazyConf());
  }

  /**
   * Creates an append writer after validating that the dataframe's schema is
   * write-compatible with the table's schema.
   */
  @Override
  public Optional<DataSourceWriter> createWriter(String jobId, StructType dfStruct, SaveMode mode,
                                                 DataSourceOptions options) {
    Preconditions.checkArgument(mode == SaveMode.Append, "Save mode %s is not supported", mode);
    Table table = findTable(options);

    // fail fast with a detailed message when the dataframe cannot be written
    Schema dfSchema = SparkSchemaUtil.convert(table.schema(), dfStruct);
    List<String> errors = CheckCompatibility.writeCompatibilityErrors(table.schema(), dfSchema);
    if (!errors.isEmpty()) {
      StringBuilder sb = new StringBuilder();
      sb.append("Cannot write incompatible dataframe to table with schema:\n")
          .append(table.schema()).append("\nProblems:");
      for (String error : errors) {
        sb.append("\n* ").append(error);
      }
      throw new IllegalArgumentException(sb.toString());
    }

    // the write option takes precedence over the table's default format property
    String formatName = options.get("iceberg.write.format")
        .orElseGet(() -> table.properties()
            .getOrDefault(DEFAULT_FILE_FORMAT, DEFAULT_FILE_FORMAT_DEFAULT));
    FileFormat format = FileFormat.valueOf(formatName.toUpperCase(Locale.ENGLISH));

    return Optional.of(new Writer(table, lazyConf(), format));
  }

  /** Loads the table at the "path" option using HadoopTables. */
  protected Table findTable(DataSourceOptions options) {
    Optional<String> location = options.get("path");
    Preconditions.checkArgument(location.isPresent(),
        "Cannot open table without a location: path is not set");
    return new HadoopTables(lazyConf()).load(location.get());
  }

  protected SparkSession lazySparkSession() {
    if (lazySpark == null) {
      this.lazySpark = SparkSession.builder().getOrCreate();
    }
    return lazySpark;
  }

  protected Configuration lazyConf() {
    if (lazyConf == null) {
      this.lazyConf = lazySparkSession().sparkContext().hadoopConfiguration();
    }
    return lazyConf;
  }
}
/*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.iceberg.spark.hacks;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.catalyst.catalog.CatalogTablePartition;
import org.apache.spark.sql.hive.HiveUtils$;
import org.apache.spark.sql.hive.client.HiveClient;
import scala.Option;
import scala.collection.Seq;
import java.util.List;
/**
 * Workarounds for reading Hive metastore state that Spark does not expose.
 */
public class Hive {
  /**
   * Lists all partitions of a Hive table.
   *
   * @param spark a SparkSession providing metastore connection settings
   * @param name a table name, either "table" (default database) or "db.table"
   * @return the table's partitions from the Hive metastore
   */
  public static Seq<CatalogTablePartition> partitions(SparkSession spark, String name) {
    List<String> parts = Lists.newArrayList(Splitter.on('.').limit(2).split(name));
    String db = parts.size() == 1 ? "default" : parts.get(0);
    String table = parts.get(parts.size() == 1 ? 0 : 1);
    HiveClient client = HiveUtils$.MODULE$.newClientForMetadata(
        spark.sparkContext().conf(),
        spark.sparkContext().hadoopConfiguration());
    // fixed: getPartitions was called twice and the first result discarded,
    // doubling the metastore round-trips
    return client.getPartitions(db, table, Option.empty());
  }
}
/*
* Copyright 2018 Hortonworks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.iceberg.spark.data;
import com.netflix.iceberg.Metrics;
import com.netflix.iceberg.io.FileAppender;
import com.netflix.iceberg.orc.OrcFileAppender;
import org.apache.orc.TypeDescription;
import org.apache.orc.storage.common.type.HiveDecimal;
import org.apache.orc.storage.ql.exec.vector.BytesColumnVector;
import org.apache.orc.storage.ql.exec.vector.ColumnVector;
import org.apache.orc.storage.ql.exec.vector.DecimalColumnVector;
import org.apache.orc.storage.ql.exec.vector.DoubleColumnVector;
import org.apache.orc.storage.ql.exec.vector.ListColumnVector;
import org.apache.orc.storage.ql.exec.vector.LongColumnVector;
import org.apache.orc.storage.ql.exec.vector.MapColumnVector;
import org.apache.orc.storage.ql.exec.vector.StructColumnVector;
import org.apache.orc.storage.ql.exec.vector.TimestampColumnVector;
import org.apache.orc.storage.ql.exec.vector.VectorizedRowBatch;
import org.apache.spark.sql.catalyst.InternalRow;
import org.apache.spark.sql.catalyst.expressions.SpecializedGetters;
import org.apache.spark.sql.catalyst.util.ArrayData;
import org.apache.spark.sql.catalyst.util.DateTimeUtils;
import org.apache.spark.sql.catalyst.util.MapData;
import org.apache.spark.unsafe.types.UTF8String;
import java.io.IOException;
import java.sql.Timestamp;
import java.util.List;
/**
* This class acts as an adaptor from an OrcFileAppender to a
* FileAppender<InternalRow>.
*/
public class SparkOrcWriter implements FileAppender<InternalRow> {
  // rows buffered per VectorizedRowBatch before it is flushed to the ORC writer
  private final static int BATCH_SIZE = 1024;
  private final VectorizedRowBatch batch;
  private final OrcFileAppender writer;
  // one converter per top-level column, in schema order
  private final Converter[] converters;

  /**
   * Creates a row-oriented adapter over a batch-oriented ORC appender.
   *
   * @param writer the ORC appender to write batches to; its schema determines
   *               the column converters and must be a top-level struct
   */
  public SparkOrcWriter(OrcFileAppender writer) {
    TypeDescription schema = writer.getSchema();
    batch = schema.createRowBatch(BATCH_SIZE);
    this.writer = writer;
    converters = buildConverters(schema);
  }

  /**
   * The interface for the conversion from Spark's SpecializedGetters to
   * ORC's ColumnVectors.
   */
  interface Converter {
    /**
     * Take a value from the Spark data value and add it to the ORC output.
     * @param rowId the row in the ColumnVector
     * @param column either the column number or element number
     * @param data either an InternalRow or ArrayData
     * @param output the ColumnVector to put the value into
     */
    void addValue(int rowId, int column, SpecializedGetters data,
                  ColumnVector output);
  }
static class BooleanConverter implements Converter {
public void addValue(int rowId, int column, SpecializedGetters data,
ColumnVector output) {
if (data.isNullAt(column)) {
output.noNulls = false;
output.isNull[rowId] = true;
} else {
output.isNull[rowId] = false;
((LongColumnVector) output).vector[rowId] = data.getBoolean(column) ? 1 : 0;
}
}
}
static class ByteConverter implements Converter {
public void addValue(int rowId, int column, SpecializedGetters data,
ColumnVector output) {
if (data.isNullAt(column)) {
output.noNulls = false;
output.isNull[rowId] = true;
} else {
output.isNull[rowId] = false;
((LongColumnVector) output).vector[rowId] = data.getByte(column);
}
}
}
static class ShortConverter implements Converter {
public void addValue(int rowId, int column, SpecializedGetters data,
ColumnVector output) {
if (data.isNullAt(column)) {
output.noNulls = false;
output.isNull[rowId] = true;
} else {
output.isNull[rowId] = false;
((LongColumnVector) output).vector[rowId] = data.getShort(column);
}
}
}
static class IntConverter implements Converter {
public void addValue(int rowId, int column, SpecializedGetters data,
ColumnVector output) {
if (data.isNullAt(column)) {
output.noNulls = false;
output.isNull[rowId] = true;
} else {
output.isNull[rowId] = false;
((LongColumnVector) output).vector[rowId] = data.getInt(column);
}
}
}
static class LongConverter implements Converter {
public void addValue(int rowId, int column, SpecializedGetters data,
ColumnVector output) {
if (data.isNullAt(column)) {
output.noNulls = false;
output.isNull[rowId] = true;
} else {
output.isNull[rowId] = false;
((LongColumnVector) output).vector[rowId] = data.getLong(column);
}
}
}
static class FloatConverter implements Converter {
public void addValue(int rowId, int column, SpecializedGetters data,
ColumnVector output) {
if (data.isNullAt(column)) {
output.noNulls = false;
output.isNull[rowId] = true;
} else {
output.isNull[rowId] = false;
((DoubleColumnVector) output).vector[rowId] = data.getFloat(column);
}
}
}
static class DoubleConverter implements Converter {
public void addValue(int rowId, int column, SpecializedGetters data,
ColumnVector output) {
if (data.isNullAt(column)) {
output.noNulls = false;
output.isNull[rowId] = true;
} else {
output.isNull[rowId] = false;
((DoubleColumnVector) output).vector[rowId] = data.getDouble(column);
}
}
}
static class StringConverter implements Converter {
public void addValue(int rowId, int column, SpecializedGetters data,
ColumnVector output) {
if (data.isNullAt(column)) {
output.noNulls = false;
output.isNull[rowId] = true;
} else {
output.isNull[rowId] = false;
byte[] value = data.getUTF8String(column).getBytes();
((BytesColumnVector) output).setRef(rowId, value, 0, value.length);
}
}
}
static class BytesConverter implements Converter {
public void addValue(int rowId, int column, SpecializedGetters data,
ColumnVector output) {
if (data.isNullAt(column)) {
output.noNulls = false;
output.isNull[rowId] = true;
} else {
output.isNull[rowId] = false;
// getBinary always makes a copy, so we don't need to worry about it
// being changed behind our back.
byte[] value = data.getBinary(column);
((BytesColumnVector) output).setRef(rowId, value, 0, value.length);
}
}
}
  /**
   * Writes Spark timestamps (microseconds since epoch) into a
   * TimestampColumnVector, which stores millisecond time plus a separate
   * non-negative nanosecond component.
   */
  static class TimestampConverter implements Converter {
    public void addValue(int rowId, int column, SpecializedGetters data,
        ColumnVector output) {
      if (data.isNullAt(column)) {
        output.noNulls = false;
        output.isNull[rowId] = true;
      } else {
        output.isNull[rowId] = false;
        TimestampColumnVector cv = (TimestampColumnVector) output;
        long micros = data.getLong(column);
        // whole seconds, expressed in milliseconds
        cv.time[rowId] = (micros / 1_000_000) * 1000;
        int nanos = (int) (micros % 1_000_000) * 1000;
        if (nanos < 0) {
          // Java division truncates toward zero, so pre-epoch timestamps must
          // borrow one second to keep the nanosecond component non-negative
          nanos += 1_000_000_000;
          cv.time[rowId] -= 1000;
        }
        cv.nanos[rowId] = nanos;
      }
    }
  }

  /**
   * Writes decimals with precision <= 18, which fit in a long, using the
   * unscaled value plus the column's fixed scale.
   */
  static class Decimal18Converter implements Converter {
    private final int precision;
    private final int scale;
    Decimal18Converter(TypeDescription schema) {
      precision = schema.getPrecision();
      scale = schema.getScale();
    }
    public void addValue(int rowId, int column, SpecializedGetters data,
        ColumnVector output) {
      if (data.isNullAt(column)) {
        output.noNulls = false;
        output.isNull[rowId] = true;
      } else {
        output.isNull[rowId] = false;
        ((DecimalColumnVector) output).vector[rowId].setFromLongAndScale(
            data.getDecimal(column, precision, scale).toUnscaledLong(), scale);
      }
    }
  }

  /**
   * Writes decimals with precision > 18 by round-tripping through
   * BigDecimal into HiveDecimal.
   */
  static class Decimal38Converter implements Converter {
    private final int precision;
    private final int scale;
    Decimal38Converter(TypeDescription schema) {
      precision = schema.getPrecision();
      scale = schema.getScale();
    }
    public void addValue(int rowId, int column, SpecializedGetters data,
        ColumnVector output) {
      if (data.isNullAt(column)) {
        output.noNulls = false;
        output.isNull[rowId] = true;
      } else {
        output.isNull[rowId] = false;
        ((DecimalColumnVector) output).vector[rowId].set(
            HiveDecimal.create(data.getDecimal(column, precision, scale)
                .toJavaBigDecimal()));
      }
    }
  }
  /** Writes structs by delegating each child field to its own converter. */
  static class StructConverter implements Converter {
    private final Converter[] children;
    StructConverter(TypeDescription schema) {
      children = new Converter[schema.getChildren().size()];
      for(int c=0; c < children.length; ++c) {
        children[c] = buildConverter(schema.getChildren().get(c));
      }
    }
    public void addValue(int rowId, int column, SpecializedGetters data,
        ColumnVector output) {
      if (data.isNullAt(column)) {
        output.noNulls = false;
        output.isNull[rowId] = true;
      } else {
        output.isNull[rowId] = false;
        InternalRow value = data.getStruct(column, children.length);
        StructColumnVector cv = (StructColumnVector) output;
        // each child converter writes into its own field vector at the same rowId
        for(int c=0; c < children.length; ++c) {
          children[c].addValue(rowId, c, value, cv.fields[c]);
        }
      }
    }
  }

  /**
   * Writes lists using ORC's flattened representation: per-row offset/length
   * into a shared child vector that is appended to via childCount.
   */
  static class ListConverter implements Converter {
    private final Converter children;
    ListConverter(TypeDescription schema) {
      children = buildConverter(schema.getChildren().get(0));
    }
    public void addValue(int rowId, int column, SpecializedGetters data,
        ColumnVector output) {
      if (data.isNullAt(column)) {
        output.noNulls = false;
        output.isNull[rowId] = true;
      } else {
        output.isNull[rowId] = false;
        ArrayData value = data.getArray(column);
        ListColumnVector cv = (ListColumnVector) output;
        // record the length and start of the list elements
        cv.lengths[rowId] = value.numElements();
        cv.offsets[rowId] = cv.childCount;
        cv.childCount += cv.lengths[rowId];
        // make sure the child is big enough
        cv.child.ensureSize(cv.childCount, true);
        // Add each element
        for(int e=0; e < cv.lengths[rowId]; ++e) {
          children.addValue((int) (e + cv.offsets[rowId]), e, value, cv.child);
        }
      }
    }
  }

  /**
   * Writes maps using the same flattened offset/length scheme as lists, with
   * parallel key and value child vectors.
   */
  static class MapConverter implements Converter {
    private final Converter keyConverter;
    private final Converter valueConverter;
    MapConverter(TypeDescription schema) {
      keyConverter = buildConverter(schema.getChildren().get(0));
      valueConverter = buildConverter(schema.getChildren().get(1));
    }
    public void addValue(int rowId, int column, SpecializedGetters data,
        ColumnVector output) {
      if (data.isNullAt(column)) {
        output.noNulls = false;
        output.isNull[rowId] = true;
      } else {
        output.isNull[rowId] = false;
        MapData map = data.getMap(column);
        ArrayData key = map.keyArray();
        ArrayData value = map.valueArray();
        MapColumnVector cv = (MapColumnVector) output;
        // record the length and start of the list elements
        // (key and value arrays always have the same number of elements)
        cv.lengths[rowId] = value.numElements();
        cv.offsets[rowId] = cv.childCount;
        cv.childCount += cv.lengths[rowId];
        // make sure the child is big enough
        cv.keys.ensureSize(cv.childCount, true);
        cv.values.ensureSize(cv.childCount, true);
        // Add each element
        for(int e=0; e < cv.lengths[rowId]; ++e) {
          int pos = (int)(e + cv.offsets[rowId]);
          keyConverter.addValue(pos, e, key, cv.keys);
          valueConverter.addValue(pos, e, value, cv.values);
        }
      }
    }
  }
private static Converter buildConverter(TypeDescription schema) {
switch (schema.getCategory()) {
case BOOLEAN:
return new BooleanConverter();
case BYTE:
return new ByteConverter();
case SHORT:
return new ShortConverter();
case DATE:
case INT:
return new IntConverter();
case LONG:
return new LongConverter();
case FLOAT:
return new FloatConverter();
case DOUBLE:
return new DoubleConverter();
case BINARY:
return new BytesConverter();
case STRING:
case CHAR:
case VARCHAR:
return new StringConverter();
case DECIMAL:
return schema.getPrecision() <= 18
? new Decimal18Converter(schema)
: new Decimal38Converter(schema);
case TIMESTAMP:
return new TimestampConverter();
case STRUCT:
return new StructConverter(schema);
case LIST:
return new ListConverter(schema);
case MAP:
return new MapConverter(schema);
}
throw new IllegalArgumentException("Unhandled type " + schema);
}
private static Converter[] buildConverters(TypeDescription schema) {
if (schema.getCategory() != TypeDescription.Category.STRUCT) {
throw new IllegalArgumentException("Top level must be a struct " + schema);
}
List<TypeDescription> children = schema.getChildren();
Converter[] result = new Converter[children.size()];
for(int c=0; c < children.size(); ++c) {
result[c] = buildConverter(children.get(c));
}
return result;
}
  @Override
  public void add(InternalRow datum) {
    // claim the next slot in the current batch
    int row = batch.size++;
    for(int c=0; c < converters.length; ++c) {
      converters[c].addValue(row, c, datum, batch.cols[c]);
    }
    // flush a full batch to the ORC writer and start a new one
    if (batch.size == BATCH_SIZE) {
      writer.add(batch);
      batch.reset();
    }
  }
  @Override
  public Metrics metrics() {
    // delegate metrics collection to the underlying ORC file appender
    return writer.metrics();
  }
  @Override
  public void close() throws IOException {
    // flush any partially-filled batch before closing the underlying writer
    if (batch.size > 0) {
      writer.add(batch);
      batch.reset();
    }
    writer.close();
  }
}
| 6,613 |
0 | Create_ds/iceberg/spark/src/main/java/com/netflix/iceberg/spark | Create_ds/iceberg/spark/src/main/java/com/netflix/iceberg/spark/data/SparkAvroWriter.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.iceberg.spark.data;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.netflix.iceberg.avro.AvroSchemaUtil;
import com.netflix.iceberg.avro.AvroSchemaVisitor;
import com.netflix.iceberg.avro.ValueWriter;
import com.netflix.iceberg.avro.ValueWriters;
import com.netflix.iceberg.types.Type;
import org.apache.avro.LogicalType;
import org.apache.avro.LogicalTypes;
import org.apache.avro.Schema;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.Encoder;
import org.apache.spark.sql.catalyst.InternalRow;
import org.apache.spark.sql.types.DataType;
import java.io.IOException;
import java.util.List;
import static com.netflix.iceberg.avro.AvroSchemaUtil.getFieldId;
import static com.netflix.iceberg.avro.AvroSchemaVisitor.visit;
import static com.netflix.iceberg.spark.SparkSchemaUtil.convert;
/**
 * An Avro {@link DatumWriter} that writes Spark {@link InternalRow} instances.
 * <p>
 * {@link #setSchema(Schema)} must be called before {@link #write(InternalRow, Encoder)}
 * so the tree of value writers can be built for the write schema.
 */
public class SparkAvroWriter implements DatumWriter<InternalRow> {
  private final com.netflix.iceberg.Schema schema;
  private ValueWriter<InternalRow> writer = null;

  public SparkAvroWriter(com.netflix.iceberg.Schema schema) {
    this.schema = schema;
  }

  @Override
  @SuppressWarnings("unchecked")
  public void setSchema(Schema schema) {
    // visit the Avro schema to build a matching tree of value writers
    this.writer = (ValueWriter<InternalRow>) visit(schema, new WriteBuilder(this.schema));
  }

  @Override
  public void write(InternalRow datum, Encoder out) throws IOException {
    writer.write(datum, out);
  }

  /** Builds a {@link ValueWriter} tree by visiting the Avro schema. */
  private static class WriteBuilder extends AvroSchemaVisitor<ValueWriter<?>> {
    private final com.netflix.iceberg.Schema schema;

    private WriteBuilder(com.netflix.iceberg.Schema schema) {
      this.schema = schema;
    }

    @Override
    public ValueWriter<?> record(Schema record, List<String> names, List<ValueWriter<?>> fields) {
      // look up each field's Spark type by its Iceberg field id
      List<DataType> types = Lists.newArrayList();
      for (Schema.Field field : record.getFields()) {
        types.add(convert(schema.findType(getFieldId(field))));
      }
      return SparkValueWriters.struct(fields, types);
    }

    @Override
    public ValueWriter<?> union(Schema union, List<ValueWriter<?>> options) {
      // only two-branch unions of null and a value type are supported;
      // use %s templates so the message is only formatted on failure
      // instead of concatenating eagerly on every call
      Preconditions.checkArgument(options.contains(ValueWriters.nulls()),
          "Cannot create writer for non-option union: %s", union);
      Preconditions.checkArgument(options.size() == 2,
          "Cannot create writer for non-option union: %s", union);
      if (union.getTypes().get(0).getType() == Schema.Type.NULL) {
        return ValueWriters.option(0, options.get(1));
      } else {
        return ValueWriters.option(1, options.get(0));
      }
    }

    @Override
    public ValueWriter<?> array(Schema array, ValueWriter<?> elementWriter) {
      LogicalType logical = array.getLogicalType();
      if (logical != null && "map".equals(logical.getName())) {
        // a key/value record array annotated as a map: reuse the struct's
        // field writers as the key and value writers
        Type keyType = schema.findType(getFieldId(array.getElementType().getField("key")));
        Type valueType = schema.findType(getFieldId(array.getElementType().getField("value")));
        ValueWriter<?>[] writers = ((SparkValueWriters.StructWriter) elementWriter).writers;
        return SparkValueWriters.arrayMap(
            writers[0], convert(keyType), writers[1], convert(valueType));
      }
      Type elementType = schema.findType(AvroSchemaUtil.getElementId(array));
      return SparkValueWriters.array(elementWriter, convert(elementType));
    }

    @Override
    public ValueWriter<?> map(Schema map, ValueWriter<?> valueReader) {
      // Avro map keys are always strings
      Type keyType = schema.findType(AvroSchemaUtil.getKeyId(map));
      Type valueType = schema.findType(AvroSchemaUtil.getValueId(map));
      return SparkValueWriters.map(
          SparkValueWriters.strings(), convert(keyType), valueReader, convert(valueType));
    }

    @Override
    public ValueWriter<?> primitive(Schema primitive) {
      LogicalType logicalType = primitive.getLogicalType();
      if (logicalType != null) {
        switch (logicalType.getName()) {
          case "date":
            // Spark uses the same representation
            return ValueWriters.ints();
          case "timestamp-micros":
            // Spark uses the same representation
            return ValueWriters.longs();
          case "decimal":
            LogicalTypes.Decimal decimal = (LogicalTypes.Decimal) logicalType;
            return SparkValueWriters.decimal(decimal.getPrecision(), decimal.getScale());
          case "uuid":
            return ValueWriters.uuids();
          default:
            throw new IllegalArgumentException("Unsupported logical type: " + logicalType);
        }
      }
      switch (primitive.getType()) {
        case NULL:
          return ValueWriters.nulls();
        case BOOLEAN:
          return ValueWriters.booleans();
        case INT:
          return ValueWriters.ints();
        case LONG:
          return ValueWriters.longs();
        case FLOAT:
          return ValueWriters.floats();
        case DOUBLE:
          return ValueWriters.doubles();
        case STRING:
          return SparkValueWriters.strings();
        case FIXED:
          return ValueWriters.fixed(primitive.getFixedSize());
        case BYTES:
          return ValueWriters.bytes();
        default:
          throw new IllegalArgumentException("Unsupported type: " + primitive);
      }
    }
  }
}
| 6,614 |
0 | Create_ds/iceberg/spark/src/main/java/com/netflix/iceberg/spark | Create_ds/iceberg/spark/src/main/java/com/netflix/iceberg/spark/data/SparkAvroReader.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.iceberg.spark.data;
import com.google.common.collect.MapMaker;
import com.netflix.iceberg.avro.AvroSchemaVisitor;
import com.netflix.iceberg.avro.ValueReader;
import com.netflix.iceberg.avro.ValueReaders;
import com.netflix.iceberg.exceptions.RuntimeIOException;
import org.apache.avro.LogicalType;
import org.apache.avro.LogicalTypes;
import org.apache.avro.Schema;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.ResolvingDecoder;
import org.apache.spark.sql.catalyst.InternalRow;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * An Avro {@link DatumReader} that produces Spark {@link InternalRow} instances.
 * <p>
 * Caches {@link ResolvingDecoder}s per (read schema, file schema) pair in a
 * thread-local map so resolution is computed once per schema pair per thread.
 */
public class SparkAvroReader implements DatumReader<InternalRow> {
  private static final ThreadLocal<Map<Schema, Map<Schema, ResolvingDecoder>>> DECODER_CACHES =
      ThreadLocal.withInitial(() -> new MapMaker().weakKeys().makeMap());

  private final Schema readSchema;
  private final ValueReader<InternalRow> reader;
  private Schema fileSchema = null;

  @SuppressWarnings("unchecked")
  public SparkAvroReader(Schema readSchema) {
    this.readSchema = readSchema;
    this.reader = (ValueReader<InternalRow>) AvroSchemaVisitor.visit(readSchema, new ReadBuilder());
  }

  @Override
  public void setSchema(Schema fileSchema) {
    // apply read-schema aliases so resolution can match renamed fields
    this.fileSchema = Schema.applyAliases(fileSchema, readSchema);
  }

  @Override
  public InternalRow read(InternalRow reuse, Decoder decoder) throws IOException {
    ResolvingDecoder resolver = resolve(decoder);
    InternalRow row = reader.read(resolver, reuse);
    // consume any trailing fields skipped by schema resolution
    resolver.drain();
    return row;
  }

  /** Returns a cached ResolvingDecoder for this thread, configured for {@code decoder}. */
  private ResolvingDecoder resolve(Decoder decoder) throws IOException {
    Map<Schema, Map<Schema, ResolvingDecoder>> cache = DECODER_CACHES.get();
    Map<Schema, ResolvingDecoder> fileSchemaToResolver = cache
        .computeIfAbsent(readSchema, k -> new HashMap<>());
    // computeIfAbsent replaces the get/null-check/put sequence
    ResolvingDecoder resolver = fileSchemaToResolver
        .computeIfAbsent(fileSchema, k -> newResolver());
    resolver.configure(decoder);
    return resolver;
  }

  private ResolvingDecoder newResolver() {
    try {
      return DecoderFactory.get().resolvingDecoder(fileSchema, readSchema, null);
    } catch (IOException e) {
      throw new RuntimeIOException(e);
    }
  }

  /** Builds a {@link ValueReader} tree by visiting the read schema. */
  private static class ReadBuilder extends AvroSchemaVisitor<ValueReader<?>> {
    private ReadBuilder() {
    }

    @Override
    public ValueReader<?> record(Schema record, List<String> names, List<ValueReader<?>> fields) {
      return SparkValueReaders.struct(fields);
    }

    @Override
    public ValueReader<?> union(Schema union, List<ValueReader<?>> options) {
      return ValueReaders.union(options);
    }

    @Override
    public ValueReader<?> array(Schema array, ValueReader<?> elementReader) {
      LogicalType logical = array.getLogicalType();
      if (logical != null && "map".equals(logical.getName())) {
        // a key/value record array annotated as a map: split the struct's
        // readers into key and value readers
        ValueReader<?>[] keyValueReaders = ((SparkValueReaders.StructReader) elementReader).readers;
        return SparkValueReaders.arrayMap(keyValueReaders[0], keyValueReaders[1]);
      }
      return SparkValueReaders.array(elementReader);
    }

    @Override
    public ValueReader<?> map(Schema map, ValueReader<?> valueReader) {
      // Avro map keys are always strings
      return SparkValueReaders.map(SparkValueReaders.strings(), valueReader);
    }

    @Override
    public ValueReader<?> primitive(Schema primitive) {
      LogicalType logicalType = primitive.getLogicalType();
      if (logicalType != null) {
        switch (logicalType.getName()) {
          case "date":
            // Spark uses the same representation
            return ValueReaders.ints();
          case "timestamp-millis":
            // adjust to microseconds
            ValueReader<Long> longs = ValueReaders.longs();
            return (ValueReader<Long>) (decoder, ignored) -> longs.read(decoder, null) * 1000L;
          case "timestamp-micros":
            // Spark uses the same representation
            return ValueReaders.longs();
          case "decimal":
            ValueReader<byte[]> inner;
            switch (primitive.getType()) {
              case FIXED:
                inner = ValueReaders.fixed(primitive.getFixedSize());
                break;
              case BYTES:
                inner = ValueReaders.bytes();
                break;
              default:
                throw new IllegalArgumentException(
                    "Invalid primitive type for decimal: " + primitive.getType());
            }
            LogicalTypes.Decimal decimal = (LogicalTypes.Decimal) logicalType;
            return SparkValueReaders.decimal(inner, decimal.getScale());
          case "uuid":
            return SparkValueReaders.uuids();
          default:
            throw new IllegalArgumentException("Unknown logical type: " + logicalType);
        }
      }
      switch (primitive.getType()) {
        case NULL:
          return ValueReaders.nulls();
        case BOOLEAN:
          return ValueReaders.booleans();
        case INT:
          return ValueReaders.ints();
        case LONG:
          return ValueReaders.longs();
        case FLOAT:
          return ValueReaders.floats();
        case DOUBLE:
          return ValueReaders.doubles();
        case STRING:
          return SparkValueReaders.strings();
        case FIXED:
          return ValueReaders.fixed(primitive.getFixedSize());
        case BYTES:
          return ValueReaders.bytes();
        default:
          throw new IllegalArgumentException("Unsupported type: " + primitive);
      }
    }
  }
}
| 6,615 |
0 | Create_ds/iceberg/spark/src/main/java/com/netflix/iceberg/spark | Create_ds/iceberg/spark/src/main/java/com/netflix/iceberg/spark/data/SparkOrcReader.java | /*
* Copyright 2018 Hortonworks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.iceberg.spark.data;
import com.netflix.iceberg.FileScanTask;
import com.netflix.iceberg.Schema;
import com.netflix.iceberg.io.InputFile;
import com.netflix.iceberg.orc.ColumnIdMap;
import com.netflix.iceberg.orc.ORC;
import com.netflix.iceberg.orc.OrcIterator;
import com.netflix.iceberg.orc.TypeConversion;
import org.apache.orc.TypeDescription;
import org.apache.orc.storage.common.type.FastHiveDecimal;
import org.apache.orc.storage.ql.exec.vector.BytesColumnVector;
import org.apache.orc.storage.ql.exec.vector.ColumnVector;
import org.apache.orc.storage.ql.exec.vector.DecimalColumnVector;
import org.apache.orc.storage.ql.exec.vector.DoubleColumnVector;
import org.apache.orc.storage.ql.exec.vector.ListColumnVector;
import org.apache.orc.storage.ql.exec.vector.LongColumnVector;
import org.apache.orc.storage.ql.exec.vector.MapColumnVector;
import org.apache.orc.storage.ql.exec.vector.StructColumnVector;
import org.apache.orc.storage.ql.exec.vector.TimestampColumnVector;
import org.apache.orc.storage.ql.exec.vector.VectorizedRowBatch;
import org.apache.orc.storage.serde2.io.DateWritable;
import org.apache.orc.storage.serde2.io.HiveDecimalWritable;
import org.apache.spark.sql.catalyst.InternalRow;
import org.apache.spark.sql.catalyst.expressions.SpecializedGetters;
import org.apache.spark.sql.catalyst.expressions.UnsafeRow;
import org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder;
import org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter;
import org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter;
import org.apache.spark.sql.catalyst.util.ArrayData;
import org.apache.spark.sql.catalyst.util.MapData;
import org.apache.spark.sql.types.Decimal;
import org.apache.spark.unsafe.Platform;
import org.apache.spark.unsafe.array.ByteArrayMethods;
import java.io.Closeable;
import java.io.IOException;
import java.math.BigDecimal;
import java.sql.Timestamp;
import java.util.Iterator;
import java.util.List;
/**
* Converts the OrcInterator, which returns ORC's VectorizedRowBatch to a
* set of Spark's UnsafeRows.
*
* It minimizes allocations by reusing most of the objects in the implementation.
*/
public class SparkOrcReader implements Iterator<InternalRow>, Closeable {
private final static int INITIAL_SIZE = 128 * 1024;
private final OrcIterator reader;
private final TypeDescription orcSchema;
private final UnsafeRow row;
private final BufferHolder holder;
private final UnsafeRowWriter writer;
private int nextRow = 0;
private VectorizedRowBatch current = null;
private Converter[] converter;
  // Opens an ORC reader over the task's byte range and prepares one reusable
  // UnsafeRow plus a converter per projected column.
  public SparkOrcReader(InputFile location,
                        FileScanTask task,
                        Schema readSchema) {
    ColumnIdMap columnIds = new ColumnIdMap();
    orcSchema = TypeConversion.toOrc(readSchema, columnIds);
    // read only this task's split, projecting the requested schema
    reader = ORC.read(location)
        .split(task.start(), task.length())
        .schema(readSchema)
        .build();
    int numFields = readSchema.columns().size();
    // one row/holder/writer trio is reused for every call to next()
    row = new UnsafeRow(numFields);
    holder = new BufferHolder(row, INITIAL_SIZE);
    writer = new UnsafeRowWriter(holder, numFields);
    converter = new Converter[numFields];
    for(int c=0; c < numFields; ++c) {
      converter[c] = buildConverter(holder, orcSchema.getChildren().get(c));
    }
  }
@Override
public boolean hasNext() {
return (current != null && nextRow < current.size) || reader.hasNext();
}
  // Converts the next batch row into the reusable UnsafeRow. The returned row
  // is overwritten by the next call, so callers must copy it to keep it.
  @Override
  public UnsafeRow next() {
    // NOTE(review): assumes hasNext() was checked first; behavior of
    // reader.next() on an exhausted reader is not visible here — confirm.
    if (current == null || nextRow >= current.size) {
      current = reader.next();
      nextRow = 0;
    }
    // Reset the holder to start the buffer over again.
    // BufferHolder.reset does the wrong thing...
    holder.cursor = Platform.BYTE_ARRAY_OFFSET;
    writer.reset();
    for(int c=0; c < current.cols.length; ++c) {
      converter[c].convert(writer, c, current.cols[c], nextRow);
    }
    nextRow++;
    return row;
  }
  @Override
  public void close() throws IOException {
    // closes the underlying ORC iterator and its file handle
    reader.close();
  }
private static void printRow(SpecializedGetters row, TypeDescription schema) {
List<TypeDescription> children = schema.getChildren();
System.out.print("{");
for(int c = 0; c < children.size(); ++c) {
System.out.print("\"" + schema.getFieldNames().get(c) + "\": ");
printRow(row, c, children.get(c));
}
System.out.print("}");
}
  // Debug helper that prints the value at ordinal `ord` of `row` as
  // JSON-like text, dispatching on the ORC type category.
  private static void printRow(SpecializedGetters row, int ord, TypeDescription schema) {
    switch (schema.getCategory()) {
      case BOOLEAN:
        System.out.print(row.getBoolean(ord));
        break;
      case BYTE:
        System.out.print(row.getByte(ord));
        break;
      case SHORT:
        System.out.print(row.getShort(ord));
        break;
      case INT:
        System.out.print(row.getInt(ord));
        break;
      case LONG:
        System.out.print(row.getLong(ord));
        break;
      case FLOAT:
        System.out.print(row.getFloat(ord));
        break;
      case DOUBLE:
        System.out.print(row.getDouble(ord));
        break;
      case CHAR:
      case VARCHAR:
      case STRING:
        System.out.print("\"" + row.getUTF8String(ord) + "\"");
        break;
      case BINARY: {
        // print bytes as a hex list, two digits per byte
        byte[] bin = row.getBinary(ord);
        if (bin == null) {
          System.out.print("null");
        } else {
          System.out.print("[");
          for (int i = 0; i < bin.length; ++i) {
            if (i != 0) {
              System.out.print(", ");
            }
            int v = bin[i] & 0xff;
            if (v < 16) {
              System.out.print("0" + Integer.toHexString(v));
            } else {
              System.out.print(Integer.toHexString(v));
            }
          }
          System.out.print("]");
        }
        break;
      }
      case DECIMAL:
        System.out.print(row.getDecimal(ord, schema.getPrecision(), schema.getScale()));
        break;
      case DATE:
        System.out.print("\"" + new DateWritable(row.getInt(ord)) + "\"");
        break;
      case TIMESTAMP:
        // NOTE(review): the stored long is microseconds (see
        // TimestampConverter) but java.sql.Timestamp takes milliseconds;
        // this likely prints the wrong instant — confirm.
        System.out.print("\"" + new Timestamp(row.getLong(ord)) + "\"");
        break;
      case STRUCT:
        printRow(row.getStruct(ord, schema.getChildren().size()), schema);
        break;
      case LIST: {
        TypeDescription child = schema.getChildren().get(0);
        System.out.print("[");
        ArrayData list = row.getArray(ord);
        for(int e=0; e < list.numElements(); ++e) {
          if (e != 0) {
            System.out.print(", ");
          }
          printRow(list, e, child);
        }
        System.out.print("]");
        break;
      }
      case MAP: {
        TypeDescription keyType = schema.getChildren().get(0);
        TypeDescription valueType = schema.getChildren().get(1);
        MapData map = row.getMap(ord);
        ArrayData keys = map.keyArray();
        ArrayData values = map.valueArray();
        System.out.print("[");
        for(int e=0; e < map.numElements(); ++e) {
          if (e != 0) {
            System.out.print(", ");
          }
          printRow(keys, e, keyType);
          System.out.print(": ");
          printRow(values, e, valueType);
        }
        System.out.print("]");
        break;
      }
      default:
        throw new IllegalArgumentException("Unhandled type " + schema);
    }
  }
static int getArrayElementSize(TypeDescription type) {
switch (type.getCategory()) {
case BOOLEAN:
case BYTE:
return 1;
case SHORT:
return 2;
case INT:
case FLOAT:
return 4;
default:
return 8;
}
}
  /**
   * The common interface for converting from a ORC ColumnVector to a Spark
   * UnsafeRow. UnsafeRows need two different interfaces for writers and thus
   * we have two methods: the first is for structs (UnsafeRowWriter) and the
   * second is for lists and maps (UnsafeArrayWriter). If Spark adds a common
   * interface similar to SpecializedGetters, we could use it and collapse
   * these into a single method.
   */
  interface Converter {
    /** Writes the value at {@code row} of {@code vector} into field {@code column} of a struct. */
    void convert(UnsafeRowWriter writer, int column, ColumnVector vector, int row);
    /** Writes the value at {@code row} of {@code vector} into slot {@code element} of an array. */
    void convert(UnsafeArrayWriter writer, int element, ColumnVector vector,
                 int row);
  }
private static class BooleanConverter implements Converter {
@Override
public void convert(UnsafeRowWriter writer, int column, ColumnVector vector, int row) {
if (vector.isRepeating) {
row = 0;
}
if (!vector.noNulls && vector.isNull[row]) {
writer.setNullAt(column);
} else {
writer.write(column, ((LongColumnVector) vector).vector[row] != 0);
}
}
@Override
public void convert(UnsafeArrayWriter writer, int element,
ColumnVector vector, int row) {
if (vector.isRepeating) {
row = 0;
}
if (!vector.noNulls && vector.isNull[row]) {
writer.setNull(element);
} else {
writer.write(element, ((LongColumnVector) vector).vector[row] != 0);
}
}
}
private static class ByteConverter implements Converter {
@Override
public void convert(UnsafeRowWriter writer, int column, ColumnVector vector,
int row) {
if (vector.isRepeating) {
row = 0;
}
if (!vector.noNulls && vector.isNull[row]) {
writer.setNullAt(column);
} else {
writer.write(column, (byte) ((LongColumnVector) vector).vector[row]);
}
}
@Override
public void convert(UnsafeArrayWriter writer, int element,
ColumnVector vector, int row) {
if (vector.isRepeating) {
row = 0;
}
if (!vector.noNulls && vector.isNull[row]) {
writer.setNull(element);
} else {
writer.write(element, (byte) ((LongColumnVector) vector).vector[row]);
}
}
}
private static class ShortConverter implements Converter {
@Override
public void convert(UnsafeRowWriter writer, int column, ColumnVector vector,
int row) {
if (vector.isRepeating) {
row = 0;
}
if (!vector.noNulls && vector.isNull[row]) {
writer.setNullAt(column);
} else {
writer.write(column, (short) ((LongColumnVector) vector).vector[row]);
}
}
@Override
public void convert(UnsafeArrayWriter writer, int element,
ColumnVector vector, int row) {
if (vector.isRepeating) {
row = 0;
}
if (!vector.noNulls && vector.isNull[row]) {
writer.setNull(element);
} else {
writer.write(element, (short) ((LongColumnVector) vector).vector[row]);
}
}
}
private static class IntConverter implements Converter {
@Override
public void convert(UnsafeRowWriter writer, int column, ColumnVector vector,
int row) {
if (vector.isRepeating) {
row = 0;
}
if (!vector.noNulls && vector.isNull[row]) {
writer.setNullAt(column);
} else {
writer.write(column, (int) ((LongColumnVector) vector).vector[row]);
}
}
@Override
public void convert(UnsafeArrayWriter writer, int element,
ColumnVector vector, int row) {
if (vector.isRepeating) {
row = 0;
}
if (!vector.noNulls && vector.isNull[row]) {
writer.setNull(element);
} else {
writer.write(element, (int) ((LongColumnVector) vector).vector[row]);
}
}
}
private static class LongConverter implements Converter {
@Override
public void convert(UnsafeRowWriter writer, int column, ColumnVector vector,
int row) {
if (vector.isRepeating) {
row = 0;
}
if (!vector.noNulls && vector.isNull[row]) {
writer.setNullAt(column);
} else {
writer.write(column, ((LongColumnVector) vector).vector[row]);
}
}
@Override
public void convert(UnsafeArrayWriter writer, int element,
ColumnVector vector, int row) {
if (vector.isRepeating) {
row = 0;
}
if (!vector.noNulls && vector.isNull[row]) {
writer.setNull(element);
} else {
writer.write(element, ((LongColumnVector) vector).vector[row]);
}
}
}
private static class FloatConverter implements Converter {
@Override
public void convert(UnsafeRowWriter writer, int column, ColumnVector vector,
int row) {
if (vector.isRepeating) {
row = 0;
}
if (!vector.noNulls && vector.isNull[row]) {
writer.setNullAt(column);
} else {
writer.write(column, (float) ((DoubleColumnVector) vector).vector[row]);
}
}
@Override
public void convert(UnsafeArrayWriter writer, int element,
ColumnVector vector, int row) {
if (vector.isRepeating) {
row = 0;
}
if (!vector.noNulls && vector.isNull[row]) {
writer.setNull(element);
} else {
writer.write(element, (float) ((DoubleColumnVector) vector).vector[row]);
}
}
}
private static class DoubleConverter implements Converter {
@Override
public void convert(UnsafeRowWriter writer, int column, ColumnVector vector,
int row) {
if (vector.isRepeating) {
row = 0;
}
if (!vector.noNulls && vector.isNull[row]) {
writer.setNullAt(column);
} else {
writer.write(column, ((DoubleColumnVector) vector).vector[row]);
}
}
@Override
public void convert(UnsafeArrayWriter writer, int element,
ColumnVector vector, int row) {
if (vector.isRepeating) {
row = 0;
}
if (!vector.noNulls && vector.isNull[row]) {
writer.setNull(element);
} else {
writer.write(element, ((DoubleColumnVector) vector).vector[row]);
}
}
}
  /** Converts ORC timestamps (millis + nanos) to Spark microseconds since epoch. */
  private static class TimestampConverter implements Converter {
    private long convert(TimestampColumnVector vector, int row) {
      // compute microseconds past 1970.
      // NOTE(review): time[row]/1000 truncates toward zero; for pre-1970
      // timestamps this may round in the wrong direction — confirm.
      long micros = (vector.time[row]/1000) * 1_000_000 + vector.nanos[row] / 1000;
      return micros;
    }
    @Override
    public void convert(UnsafeRowWriter writer, int column, ColumnVector vector,
                        int row) {
      if (vector.isRepeating) {
        row = 0;
      }
      if (!vector.noNulls && vector.isNull[row]) {
        writer.setNullAt(column);
      } else {
        writer.write(column, convert((TimestampColumnVector) vector, row));
      }
    }
    @Override
    public void convert(UnsafeArrayWriter writer, int element,
                        ColumnVector vector, int row) {
      if (vector.isRepeating) {
        row = 0;
      }
      if (!vector.noNulls && vector.isNull[row]) {
        writer.setNull(element);
      } else {
        writer.write(element, convert((TimestampColumnVector) vector, row));
      }
    }
  }
  /**
   * UnsafeArrayWriter doesn't have a binary form that lets the user pass an
   * offset and length, so I've added one here. It is the minor tweak of the
   * UnsafeArrayWriter.write(int, byte[]) method.
   * @param holder the BufferHolder where the bytes are being written
   * @param writer the UnsafeArrayWriter
   * @param ordinal the element that we are writing into
   * @param input the input bytes
   * @param offset the first byte from input
   * @param length the number of bytes to write
   */
  static void write(BufferHolder holder, UnsafeArrayWriter writer, int ordinal,
                    byte[] input, int offset, int length) {
    // variable-length data is padded to the next 8-byte word
    final int roundedSize = ByteArrayMethods.roundNumberOfBytesToNearestWord(length);
    // grow the global buffer before writing data.
    holder.grow(roundedSize);
    // zero the final word so the padding bytes are deterministic
    if ((length & 0x07) > 0) {
      Platform.putLong(holder.buffer, holder.cursor + ((length >> 3) << 3), 0L);
    }
    // Write the bytes to the variable length portion.
    Platform.copyMemory(input, Platform.BYTE_ARRAY_OFFSET + offset,
        holder.buffer, holder.cursor, length);
    writer.setOffsetAndSize(ordinal, holder.cursor, length);
    // move the cursor forward.
    holder.cursor += roundedSize;
  }
  /**
   * Copies binary/string bytes from a BytesColumnVector. Array elements need
   * the static write() helper because UnsafeArrayWriter has no
   * (byte[], offset, length) overload.
   */
  private static class BinaryConverter implements Converter {
    private final BufferHolder holder;
    BinaryConverter(BufferHolder holder) {
      this.holder = holder;
    }
    @Override
    public void convert(UnsafeRowWriter writer, int column, ColumnVector vector,
                        int row) {
      if (vector.isRepeating) {
        row = 0;
      }
      if (!vector.noNulls && vector.isNull[row]) {
        writer.setNullAt(column);
      } else {
        BytesColumnVector v = (BytesColumnVector) vector;
        writer.write(column, v.vector[row], v.start[row], v.length[row]);
      }
    }
    @Override
    public void convert(UnsafeArrayWriter writer, int element,
                        ColumnVector vector, int row) {
      if (vector.isRepeating) {
        row = 0;
      }
      if (!vector.noNulls && vector.isNull[row]) {
        writer.setNull(element);
      } else {
        BytesColumnVector v = (BytesColumnVector) vector;
        write(holder, writer, element, v.vector[row], v.start[row],
              v.length[row]);
      }
    }
  }
/**
* This hack is to get the unscaled value (for precision <= 18) quickly.
* This can be replaced when we upgrade to storage-api 2.5.0.
*/
static class DecimalHack extends FastHiveDecimal {
long unscaledLong(FastHiveDecimal value) {
fastSet(value);
return fastSignum * fast1 * 10_000_000_000_000_000L + fast0;
}
}
  /**
   * Converts decimals with precision <= 18, which fit in a long, using the
   * DecimalHack fast path to avoid BigDecimal allocation.
   */
  private static class Decimal18Converter implements Converter {
    final DecimalHack hack = new DecimalHack();
    final int precision;
    final int scale;
    Decimal18Converter(int precision, int scale) {
      this.precision = precision;
      this.scale = scale;
    }
    @Override
    public void convert(UnsafeRowWriter writer, int column, ColumnVector vector,
                        int row) {
      if (vector.isRepeating) {
        row = 0;
      }
      if (!vector.noNulls && vector.isNull[row]) {
        writer.setNullAt(column);
      } else {
        HiveDecimalWritable v = ((DecimalColumnVector) vector).vector[row];
        // NOTE(review): the Decimal is built with the value's own scale
        // (v.scale()) but written with the column's declared scale — assumes
        // Spark rescales or the scales always match; confirm.
        writer.write(column,
                     new Decimal().set(hack.unscaledLong(v), precision, v.scale()),
                     precision, scale);
      }
    }
    @Override
    public void convert(UnsafeArrayWriter writer, int element,
                        ColumnVector vector, int row) {
      if (vector.isRepeating) {
        row = 0;
      }
      if (!vector.noNulls && vector.isNull[row]) {
        writer.setNull(element);
      } else {
        HiveDecimalWritable v = ((DecimalColumnVector) vector).vector[row];
        writer.write(element,
                     new Decimal().set(hack.unscaledLong(v), precision, v.scale()),
                     precision, scale);
      }
    }
  }
  /**
   * Converts decimals with precision > 18 through BigDecimal, which is
   * slower but handles values that do not fit in a long.
   */
  private static class Decimal38Converter implements Converter {
    final int precision;
    final int scale;
    Decimal38Converter(int precision, int scale) {
      this.precision = precision;
      this.scale = scale;
    }
    @Override
    public void convert(UnsafeRowWriter writer, int column, ColumnVector vector,
                        int row) {
      if (vector.isRepeating) {
        row = 0;
      }
      if (!vector.noNulls && vector.isNull[row]) {
        writer.setNullAt(column);
      } else {
        BigDecimal v = ((DecimalColumnVector) vector).vector[row]
            .getHiveDecimal().bigDecimalValue();
        writer.write(column,
                     new Decimal().set(new scala.math.BigDecimal(v), precision, scale),
                     precision, scale);
      }
    }
    @Override
    public void convert(UnsafeArrayWriter writer, int element,
                        ColumnVector vector, int row) {
      if (vector.isRepeating) {
        row = 0;
      }
      if (!vector.noNulls && vector.isNull[row]) {
        writer.setNull(element);
      } else {
        BigDecimal v = ((DecimalColumnVector) vector).vector[row]
            .getHiveDecimal().bigDecimalValue();
        writer.write(element,
                     new Decimal().set(new scala.math.BigDecimal(v), precision, scale),
                     precision, scale);
      }
    }
  }
  /**
   * Converts ORC structs into nested UnsafeRows written into the shared
   * buffer; the parent records the (offset, size) of the serialized child.
   */
  private static class StructConverter implements Converter {
    private final BufferHolder holder;
    private final Converter[] children;
    private final UnsafeRowWriter childWriter;
    StructConverter(BufferHolder holder, TypeDescription schema) {
      this.holder = holder;
      children = new Converter[schema.getChildren().size()];
      for(int c=0; c < children.length; ++c) {
        children[c] = buildConverter(holder, schema.getChildren().get(c));
      }
      childWriter = new UnsafeRowWriter(holder, children.length);
    }
    // Serializes the struct at `row` into the buffer and returns its start
    // offset; the caller computes the size from the cursor afterwards.
    int writeStruct(StructColumnVector vector, int row) {
      int start = holder.cursor;
      childWriter.reset();
      for(int c=0; c < children.length; ++c) {
        children[c].convert(childWriter, c, vector.fields[c], row);
      }
      return start;
    }
    @Override
    public void convert(UnsafeRowWriter writer, int column, ColumnVector vector, int row) {
      if (vector.isRepeating) {
        row = 0;
      }
      if (!vector.noNulls && vector.isNull[row]) {
        writer.setNullAt(column);
      } else {
        int start = writeStruct((StructColumnVector) vector, row);
        writer.setOffsetAndSize(column, start, holder.cursor - start);
      }
    }
    @Override
    public void convert(UnsafeArrayWriter writer, int element,
                        ColumnVector vector, int row) {
      if (vector.isRepeating) {
        row = 0;
      }
      if (!vector.noNulls && vector.isNull[row]) {
        writer.setNull(element);
      } else {
        int start = writeStruct((StructColumnVector) vector, row);
        writer.setOffsetAndSize(element, start, holder.cursor - start);
      }
    }
  }
  /**
   * Converts ORC lists into Spark unsafe arrays written into the shared
   * buffer; the parent records the (offset, size) of the serialized array.
   */
  private static class ListConverter implements Converter {
    private final BufferHolder holder;
    private final Converter children;
    private final UnsafeArrayWriter childWriter;
    private final int elementSize;
    ListConverter(BufferHolder holder, TypeDescription schema) {
      this.holder = holder;
      TypeDescription child = schema.getChildren().get(0);
      children = buildConverter(holder, child);
      childWriter = new UnsafeArrayWriter();
      // fixed per-element slot size required by UnsafeArrayWriter
      elementSize = getArrayElementSize(child);
    }
    // Serializes the list at `row` into the buffer and returns its start
    // offset; the caller computes the size from the cursor afterwards.
    int writeList(ListColumnVector v, int row) {
      int offset = (int) v.offsets[row];
      int length = (int) v.lengths[row];
      int start = holder.cursor;
      childWriter.initialize(holder, length, elementSize);
      for(int c=0; c < length; ++c) {
        children.convert(childWriter, c, v.child, offset + c);
      }
      return start;
    }
    @Override
    public void convert(UnsafeRowWriter writer, int column, ColumnVector vector,
                        int row) {
      if (vector.isRepeating) {
        row = 0;
      }
      if (!vector.noNulls && vector.isNull[row]) {
        writer.setNullAt(column);
      } else {
        int start = writeList((ListColumnVector) vector, row);
        writer.setOffsetAndSize(column, start, holder.cursor - start);
      }
    }
    @Override
    public void convert(UnsafeArrayWriter writer, int element,
                        ColumnVector vector, int row) {
      if (vector.isRepeating) {
        row = 0;
      }
      if (!vector.noNulls && vector.isNull[row]) {
        writer.setNull(element);
      } else {
        int start = writeList((ListColumnVector) vector, row);
        writer.setOffsetAndSize(element, start, holder.cursor - start);
      }
    }
  }
/**
 * Serializes ORC map columns using Spark's unsafe map encoding: an 8-byte
 * length for the serialized key section, followed by the key array, followed
 * by the value array.
 */
private static class MapConverter implements Converter {
  private final BufferHolder holder;
  private final Converter keyConvert;
  private final Converter valueConvert;
  private final UnsafeArrayWriter childWriter;
  // fixed per-element sizes used to initialize the array writer
  private final int keySize;
  private final int valueSize;

  MapConverter(BufferHolder holder, TypeDescription schema) {
    this.holder = holder;
    // an ORC map type has exactly two children: the key type and the value type
    TypeDescription keyType = schema.getChildren().get(0);
    TypeDescription valueType = schema.getChildren().get(1);
    keyConvert = buildConverter(holder, keyType);
    keySize = getArrayElementSize(keyType);
    valueConvert = buildConverter(holder, valueType);
    valueSize = getArrayElementSize(valueType);
    childWriter = new UnsafeArrayWriter();
  }

  /**
   * Serializes one map from the vector into the holder's buffer.
   *
   * @return the buffer offset where this map's data starts
   */
  int writeMap(MapColumnVector v, int row) {
    // offsets/lengths give this row's slice of the shared keys/values vectors
    int offset = (int) v.offsets[row];
    int length = (int) v.lengths[row];
    int start = holder.cursor;
    // save room for the key size
    final int KEY_SIZE_BYTES = 8;
    holder.grow(KEY_SIZE_BYTES);
    holder.cursor += KEY_SIZE_BYTES;
    // serialize the keys
    childWriter.initialize(holder, length, keySize);
    for(int c=0; c < length; ++c) {
      keyConvert.convert(childWriter, c, v.keys, offset + c);
    }
    // store the serialized size of the keys (backpatched at the reserved slot)
    Platform.putLong(holder.buffer, start, holder.cursor - start - KEY_SIZE_BYTES);
    // serialize the values
    childWriter.initialize(holder, length, valueSize);
    for(int c=0; c < length; ++c) {
      valueConvert.convert(childWriter, c, v.values, offset + c);
    }
    return start;
  }

  @Override
  public void convert(UnsafeRowWriter writer, int column, ColumnVector vector,
                      int row) {
    // a repeating vector stores its single value at index 0
    if (vector.isRepeating) {
      row = 0;
    }
    if (!vector.noNulls && vector.isNull[row]) {
      writer.setNullAt(column);
    } else {
      int start = writeMap((MapColumnVector) vector, row);
      writer.setOffsetAndSize(column, start, holder.cursor - start);
    }
  }

  @Override
  public void convert(UnsafeArrayWriter writer, int element,
                      ColumnVector vector, int row) {
    if (vector.isRepeating) {
      row = 0;
    }
    if (!vector.noNulls && vector.isNull[row]) {
      writer.setNull(element);
    } else {
      int start = writeMap((MapColumnVector) vector, row);
      writer.setOffsetAndSize(element, start, holder.cursor - start);
    }
  }
}
/**
 * Creates a converter for the given ORC type that writes values into Spark's
 * unsafe row/array formats.
 *
 * @param holder buffer shared by the nested writers (used by variable-length
 *               and nested converters)
 * @param schema the ORC type to convert
 * @return a converter for the type
 * @throws IllegalArgumentException if the ORC category is not supported
 */
static Converter buildConverter(BufferHolder holder, TypeDescription schema) {
  switch (schema.getCategory()) {
    case BOOLEAN:
      return new BooleanConverter();
    case BYTE:
      return new ByteConverter();
    case SHORT:
      return new ShortConverter();
    // dates are stored as days-since-epoch ints, so they share the int path
    case DATE:
    case INT:
      return new IntConverter();
    case LONG:
      return new LongConverter();
    case FLOAT:
      return new FloatConverter();
    case DOUBLE:
      return new DoubleConverter();
    case TIMESTAMP:
      return new TimestampConverter();
    case DECIMAL: {
      // decimals that fit in a long use the compact converter
      int precision = schema.getPrecision();
      int scale = schema.getScale();
      return precision <= Decimal.MAX_LONG_DIGITS()
          ? new Decimal18Converter(precision, scale)
          : new Decimal38Converter(precision, scale);
    }
    // all byte-array-backed types share the binary converter
    case BINARY:
    case STRING:
    case CHAR:
    case VARCHAR:
      return new BinaryConverter(holder);
    case STRUCT:
      return new StructConverter(holder, schema);
    case LIST:
      return new ListConverter(holder, schema);
    case MAP:
      return new MapConverter(holder, schema);
    default:
      throw new IllegalArgumentException("Unhandled type " + schema);
  }
}
}
/*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.iceberg.spark.data;
import com.google.common.base.Preconditions;
import com.netflix.iceberg.avro.ValueWriter;
import com.netflix.iceberg.types.TypeUtil;
import org.apache.avro.io.Encoder;
import org.apache.avro.util.Utf8;
import org.apache.spark.sql.catalyst.InternalRow;
import org.apache.spark.sql.catalyst.util.ArrayData;
import org.apache.spark.sql.catalyst.util.MapData;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.Decimal;
import org.apache.spark.unsafe.types.UTF8String;
import java.io.IOException;
import java.lang.reflect.Array;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.List;
import java.util.UUID;
/**
 * Factory methods for {@link ValueWriter} implementations that serialize Spark
 * in-memory values ({@link UTF8String}, {@link Decimal}, {@link InternalRow},
 * {@link ArrayData}, {@link MapData}) with an Avro {@link Encoder}.
 *
 * <p>Fix: the singleton {@code INSTANCE} fields are now {@code final} so the
 * shared writers cannot be reassigned.
 */
public class SparkValueWriters {

  /** Returns a writer that encodes {@link UTF8String} values as Avro strings. */
  static ValueWriter<UTF8String> strings() {
    return StringWriter.INSTANCE;
  }

  /** Returns a writer that encodes UUID strings as 16-byte Avro fixed values. */
  static ValueWriter<UTF8String> uuids() {
    return UUIDWriter.INSTANCE;
  }

  /** Returns a writer that encodes {@link Decimal} values as fixed-length big-endian bytes. */
  static ValueWriter<Decimal> decimal(int precision, int scale) {
    return new DecimalWriter(precision, scale);
  }

  /** Returns a writer that encodes {@link ArrayData} as an Avro array. */
  static <T> ValueWriter<ArrayData> array(ValueWriter<T> elementWriter, DataType elementType) {
    return new ArrayWriter<>(elementWriter, elementType);
  }

  /** Returns a writer that encodes {@link MapData} as an Avro array of key/value pairs. */
  static <K, V> ValueWriter<MapData> arrayMap(
      ValueWriter<K> keyWriter, DataType keyType, ValueWriter<V> valueWriter, DataType valueType) {
    return new ArrayMapWriter<>(keyWriter, keyType, valueWriter, valueType);
  }

  /** Returns a writer that encodes {@link MapData} as an Avro map. */
  static <K, V> ValueWriter<MapData> map(
      ValueWriter<K> keyWriter, DataType keyType, ValueWriter<V> valueWriter, DataType valueType) {
    return new MapWriter<>(keyWriter, keyType, valueWriter, valueType);
  }

  /** Returns a writer that encodes {@link InternalRow} fields in order with the given writers. */
  static ValueWriter<InternalRow> struct(List<ValueWriter<?>> writers, List<DataType> types) {
    return new StructWriter(writers, types);
  }

  private static class StringWriter implements ValueWriter<UTF8String> {
    // final: shared stateless singleton
    private static final StringWriter INSTANCE = new StringWriter();

    private StringWriter() {
    }

    @Override
    public void write(UTF8String s, Encoder encoder) throws IOException {
      // use getBytes because it may return the backing byte array if available.
      // otherwise, it copies to a new byte array, which is still cheaper than Avro
      // calling toString, which incurs encoding costs
      encoder.writeString(new Utf8(s.getBytes()));
    }
  }

  private static class UUIDWriter implements ValueWriter<UTF8String> {
    // one 16-byte scratch buffer per thread so the singleton is safe to share
    private static final ThreadLocal<ByteBuffer> BUFFER = ThreadLocal.withInitial(() -> {
      ByteBuffer buffer = ByteBuffer.allocate(16);
      buffer.order(ByteOrder.BIG_ENDIAN);
      return buffer;
    });

    // final: shared singleton
    private static final UUIDWriter INSTANCE = new UUIDWriter();

    private UUIDWriter() {
    }

    @Override
    public void write(UTF8String s, Encoder encoder) throws IOException {
      // TODO: direct conversion from string to byte buffer
      UUID uuid = UUID.fromString(s.toString());
      ByteBuffer buffer = BUFFER.get();
      buffer.rewind();
      buffer.putLong(uuid.getMostSignificantBits());
      buffer.putLong(uuid.getLeastSignificantBits());
      encoder.writeFixed(buffer.array());
    }
  }

  private static class DecimalWriter implements ValueWriter<Decimal> {
    private final int precision;
    private final int scale;
    // fixed byte length required to hold any value of the given precision
    private final int length;
    // per-thread scratch buffer so a single writer can be shared across threads
    private final ThreadLocal<byte[]> bytes;

    private DecimalWriter(int precision, int scale) {
      this.precision = precision;
      this.scale = scale;
      this.length = TypeUtil.decimalRequriedBytes(precision);
      this.bytes = ThreadLocal.withInitial(() -> new byte[length]);
    }

    @Override
    public void write(Decimal d, Encoder encoder) throws IOException {
      Preconditions.checkArgument(d.scale() == scale,
          "Cannot write value as decimal(%s,%s), wrong scale: %s", precision, scale, d);
      Preconditions.checkArgument(d.precision() <= precision,
          "Cannot write value as decimal(%s,%s), too large: %s", precision, scale, d);
      BigDecimal decimal = d.toJavaBigDecimal();
      // sign-extend the two's-complement unscaled value to the fixed length
      byte fillByte = (byte) (decimal.signum() < 0 ? 0xFF : 0x00);
      byte[] unscaled = decimal.unscaledValue().toByteArray();
      byte[] buf = bytes.get();
      int offset = length - unscaled.length;

      for (int i = 0; i < length; i += 1) {
        if (i < offset) {
          buf[i] = fillByte;
        } else {
          buf[i] = unscaled[i - offset];
        }
      }

      encoder.writeFixed(buf);
    }
  }

  private static class ArrayWriter<T> implements ValueWriter<ArrayData> {
    private final ValueWriter<T> elementWriter;
    private final DataType elementType;

    private ArrayWriter(ValueWriter<T> elementWriter, DataType elementType) {
      this.elementWriter = elementWriter;
      this.elementType = elementType;
    }

    @Override
    @SuppressWarnings("unchecked")
    public void write(ArrayData array, Encoder encoder) throws IOException {
      // Avro array protocol: start, item count, one startItem per element, end
      encoder.writeArrayStart();
      int numElements = array.numElements();
      encoder.setItemCount(numElements);
      for (int i = 0; i < numElements; i += 1) {
        encoder.startItem();
        elementWriter.write((T) array.get(i, elementType), encoder);
      }
      encoder.writeArrayEnd();
    }
  }

  private static class ArrayMapWriter<K, V> implements ValueWriter<MapData> {
    private final ValueWriter<K> keyWriter;
    private final ValueWriter<V> valueWriter;
    private final DataType keyType;
    private final DataType valueType;

    private ArrayMapWriter(ValueWriter<K> keyWriter, DataType keyType,
                           ValueWriter<V> valueWriter, DataType valueType) {
      this.keyWriter = keyWriter;
      this.keyType = keyType;
      this.valueWriter = valueWriter;
      this.valueType = valueType;
    }

    @Override
    @SuppressWarnings("unchecked")
    public void write(MapData map, Encoder encoder) throws IOException {
      // encoded as an Avro *array* of key/value records (supports non-string keys)
      encoder.writeArrayStart();
      int numElements = map.numElements();
      encoder.setItemCount(numElements);
      ArrayData keyArray = map.keyArray();
      ArrayData valueArray = map.valueArray();
      for (int i = 0; i < numElements; i += 1) {
        encoder.startItem();
        keyWriter.write((K) keyArray.get(i, keyType), encoder);
        valueWriter.write((V) valueArray.get(i, valueType), encoder);
      }
      encoder.writeArrayEnd();
    }
  }

  private static class MapWriter<K, V> implements ValueWriter<MapData> {
    private final ValueWriter<K> keyWriter;
    private final ValueWriter<V> valueWriter;
    private final DataType keyType;
    private final DataType valueType;

    private MapWriter(ValueWriter<K> keyWriter, DataType keyType,
                      ValueWriter<V> valueWriter, DataType valueType) {
      this.keyWriter = keyWriter;
      this.keyType = keyType;
      this.valueWriter = valueWriter;
      this.valueType = valueType;
    }

    @Override
    @SuppressWarnings("unchecked")
    public void write(MapData map, Encoder encoder) throws IOException {
      // encoded as a true Avro map
      encoder.writeMapStart();
      int numElements = map.numElements();
      encoder.setItemCount(numElements);
      ArrayData keyArray = map.keyArray();
      ArrayData valueArray = map.valueArray();
      for (int i = 0; i < numElements; i += 1) {
        encoder.startItem();
        keyWriter.write((K) keyArray.get(i, keyType), encoder);
        valueWriter.write((V) valueArray.get(i, valueType), encoder);
      }
      encoder.writeMapEnd();
    }
  }

  static class StructWriter implements ValueWriter<InternalRow> {
    final ValueWriter<?>[] writers;
    private final DataType[] types;

    @SuppressWarnings("unchecked")
    private StructWriter(List<ValueWriter<?>> writers, List<DataType> types) {
      this.writers = (ValueWriter<?>[]) Array.newInstance(ValueWriter.class, writers.size());
      this.types = new DataType[writers.size()];
      for (int i = 0; i < writers.size(); i += 1) {
        this.writers[i] = writers.get(i);
        this.types[i] = types.get(i);
      }
    }

    @Override
    public void write(InternalRow row, Encoder encoder) throws IOException {
      for (int i = 0; i < types.length; i += 1) {
        if (row.isNullAt(i)) {
          // null is passed through; the field writer is expected to handle it
          // (presumably an option/union writer) — NOTE(review): confirm
          writers[i].write(null, encoder);
        } else {
          write(row, i, writers[i], encoder);
        }
      }
    }

    // helper that captures the writer's type parameter for the unchecked cast
    @SuppressWarnings("unchecked")
    private <T> void write(InternalRow row, int pos, ValueWriter<T> writer, Encoder encoder)
        throws IOException {
      writer.write((T) row.get(pos, types[pos]), encoder);
    }
  }
}
/*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.iceberg.spark.data;
import com.google.common.collect.Lists;
import com.netflix.iceberg.avro.ValueReader;
import org.apache.avro.Schema;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.ResolvingDecoder;
import org.apache.avro.util.Utf8;
import org.apache.spark.sql.catalyst.InternalRow;
import org.apache.spark.sql.catalyst.expressions.GenericInternalRow;
import org.apache.spark.sql.catalyst.util.ArrayBasedMapData;
import org.apache.spark.sql.catalyst.util.ArrayData;
import org.apache.spark.sql.catalyst.util.GenericArrayData;
import org.apache.spark.sql.types.Decimal;
import org.apache.spark.unsafe.types.UTF8String;
import java.io.IOException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.List;
import java.util.UUID;
/**
 * Factory methods for {@link ValueReader} implementations that decode Avro
 * data directly into Spark's in-memory representations ({@link UTF8String},
 * {@link Decimal}, {@link InternalRow}, array and map data).
 *
 * <p>Fixes: the singleton {@code INSTANCE} fields are now {@code final}, and
 * dead commented-out code was removed from {@code StringReader}.
 *
 * <p>The array/map readers reuse internal lists between calls, so reader
 * instances are not thread-safe.
 */
public class SparkValueReaders {

  /** Returns a reader that decodes Avro strings as {@link UTF8String}. */
  static ValueReader<UTF8String> strings() {
    return StringReader.INSTANCE;
  }

  /** Returns a reader that decodes 16-byte fixed UUIDs as their string form. */
  static ValueReader<UTF8String> uuids() {
    return UUIDReader.INSTANCE;
  }

  /** Returns a reader that decodes unscaled bytes into a {@link Decimal} with the given scale. */
  static ValueReader<Decimal> decimal(ValueReader<byte[]> unscaledReader, int scale) {
    return new DecimalReader(unscaledReader, scale);
  }

  /** Returns a reader that decodes an Avro array into {@link ArrayData}. */
  static ValueReader<ArrayData> array(ValueReader<?> elementReader) {
    return new ArrayReader(elementReader);
  }

  /** Returns a reader for maps encoded as an Avro array of key/value pairs. */
  static ValueReader<ArrayBasedMapData> arrayMap(ValueReader<?> keyReader,
                                                 ValueReader<?> valueReader) {
    return new ArrayMapReader(keyReader, valueReader);
  }

  /** Returns a reader for maps encoded as a true Avro map. */
  static ValueReader<ArrayBasedMapData> map(ValueReader<?> keyReader, ValueReader<?> valueReader) {
    return new MapReader(keyReader, valueReader);
  }

  /** Returns a reader that decodes a record into an {@link InternalRow}. */
  static ValueReader<InternalRow> struct(List<ValueReader<?>> readers) {
    return new StructReader(readers);
  }

  private static class StringReader implements ValueReader<UTF8String> {
    // final: shared stateless singleton
    private static final StringReader INSTANCE = new StringReader();

    private StringReader() {
    }

    @Override
    public UTF8String read(Decoder decoder, Object reuse) throws IOException {
      // use the decoder's readString(Utf8) method because it may be a resolving decoder
      Utf8 utf8 = null;
      if (reuse instanceof UTF8String) {
        utf8 = new Utf8(((UTF8String) reuse).getBytes());
      }
      Utf8 string = decoder.readString(utf8);
      return UTF8String.fromBytes(string.getBytes(), 0, string.getByteLength());
    }
  }

  private static class UUIDReader implements ValueReader<UTF8String> {
    // one 16-byte scratch buffer per thread so the singleton is safe to share
    private static final ThreadLocal<ByteBuffer> BUFFER = ThreadLocal.withInitial(() -> {
      ByteBuffer buffer = ByteBuffer.allocate(16);
      buffer.order(ByteOrder.BIG_ENDIAN);
      return buffer;
    });

    // final: shared singleton
    private static final UUIDReader INSTANCE = new UUIDReader();

    private UUIDReader() {
    }

    @Override
    public UTF8String read(Decoder decoder, Object reuse) throws IOException {
      ByteBuffer buffer = BUFFER.get();
      buffer.rewind();
      decoder.readFixed(buffer.array(), 0, 16);
      long mostSigBits = buffer.getLong();
      long leastSigBits = buffer.getLong();
      return UTF8String.fromString(new UUID(mostSigBits, leastSigBits).toString());
    }
  }

  private static class DecimalReader implements ValueReader<Decimal> {
    private final ValueReader<byte[]> bytesReader;
    private final int scale;

    private DecimalReader(ValueReader<byte[]> bytesReader, int scale) {
      this.bytesReader = bytesReader;
      this.scale = scale;
    }

    @Override
    public Decimal read(Decoder decoder, Object reuse) throws IOException {
      // bytes hold the two's-complement unscaled value
      byte[] bytes = bytesReader.read(decoder, null);
      return Decimal.apply(new BigDecimal(new BigInteger(bytes), scale));
    }
  }

  private static class ArrayReader implements ValueReader<ArrayData> {
    private final ValueReader<?> elementReader;
    // reused between calls; cleared at the start of each read (not thread-safe)
    private final List<Object> reusedList = Lists.newArrayList();

    private ArrayReader(ValueReader<?> elementReader) {
      this.elementReader = elementReader;
    }

    @Override
    public GenericArrayData read(Decoder decoder, Object reuse) throws IOException {
      reusedList.clear();
      // Avro arrays arrive in chunks; read until a zero-length chunk
      long chunkLength = decoder.readArrayStart();
      while (chunkLength > 0) {
        for (int i = 0; i < chunkLength; i += 1) {
          reusedList.add(elementReader.read(decoder, null));
        }
        chunkLength = decoder.arrayNext();
      }
      // this will convert the list to an array so it is okay to reuse the list
      return new GenericArrayData(reusedList.toArray());
    }
  }

  private static class ArrayMapReader implements ValueReader<ArrayBasedMapData> {
    private final ValueReader<?> keyReader;
    private final ValueReader<?> valueReader;
    // reused between calls; cleared at the start of each read (not thread-safe)
    private final List<Object> reusedKeyList = Lists.newArrayList();
    private final List<Object> reusedValueList = Lists.newArrayList();

    private ArrayMapReader(ValueReader<?> keyReader, ValueReader<?> valueReader) {
      this.keyReader = keyReader;
      this.valueReader = valueReader;
    }

    @Override
    public ArrayBasedMapData read(Decoder decoder, Object reuse) throws IOException {
      reusedKeyList.clear();
      reusedValueList.clear();
      // map encoded as an Avro array of key/value pairs
      long chunkLength = decoder.readArrayStart();
      while (chunkLength > 0) {
        for (int i = 0; i < chunkLength; i += 1) {
          reusedKeyList.add(keyReader.read(decoder, null));
          reusedValueList.add(valueReader.read(decoder, null));
        }
        chunkLength = decoder.arrayNext();
      }
      return new ArrayBasedMapData(
          new GenericArrayData(reusedKeyList.toArray()),
          new GenericArrayData(reusedValueList.toArray()));
    }
  }

  private static class MapReader implements ValueReader<ArrayBasedMapData> {
    private final ValueReader<?> keyReader;
    private final ValueReader<?> valueReader;
    // reused between calls; cleared at the start of each read (not thread-safe)
    private final List<Object> reusedKeyList = Lists.newArrayList();
    private final List<Object> reusedValueList = Lists.newArrayList();

    private MapReader(ValueReader<?> keyReader, ValueReader<?> valueReader) {
      this.keyReader = keyReader;
      this.valueReader = valueReader;
    }

    @Override
    public ArrayBasedMapData read(Decoder decoder, Object reuse) throws IOException {
      reusedKeyList.clear();
      reusedValueList.clear();
      // true Avro map: chunked key/value entries until a zero-length chunk
      long chunkLength = decoder.readMapStart();
      while (chunkLength > 0) {
        for (int i = 0; i < chunkLength; i += 1) {
          reusedKeyList.add(keyReader.read(decoder, null));
          reusedValueList.add(valueReader.read(decoder, null));
        }
        chunkLength = decoder.mapNext();
      }
      return new ArrayBasedMapData(
          new GenericArrayData(reusedKeyList.toArray()),
          new GenericArrayData(reusedValueList.toArray()));
    }
  }

  static class StructReader implements ValueReader<InternalRow> {
    final ValueReader<?>[] readers;

    private StructReader(List<ValueReader<?>> readers) {
      this.readers = new ValueReader[readers.size()];
      for (int i = 0; i < this.readers.length; i += 1) {
        this.readers[i] = readers.get(i);
      }
    }

    @Override
    public InternalRow read(Decoder decoder, Object reuse) throws IOException {
      GenericInternalRow row = new GenericInternalRow(readers.length);
      if (decoder instanceof ResolvingDecoder) {
        // this may not set all of the fields. nulls are set by default.
        // a resolving decoder may reorder fields, so follow its field order
        for (Schema.Field field : ((ResolvingDecoder) decoder).readFieldOrder()) {
          Object value = readers[field.pos()].read(decoder, null);
          if (value != null) {
            row.update(field.pos(), value);
          } else {
            row.setNullAt(field.pos());
          }
        }
      } else {
        for (int i = 0; i < readers.length; i += 1) {
          Object value = readers[i].read(decoder, null);
          if (value != null) {
            row.update(i, value);
          } else {
            row.setNullAt(i);
          }
        }
      }
      return row;
    }
  }
}
/*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.iceberg.spark.data;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.netflix.iceberg.Schema;
import com.netflix.iceberg.parquet.ParquetValueReader;
import com.netflix.iceberg.parquet.ParquetValueReaders;
import com.netflix.iceberg.parquet.ParquetValueReaders.FloatAsDoubleReader;
import com.netflix.iceberg.parquet.ParquetValueReaders.IntAsLongReader;
import com.netflix.iceberg.parquet.ParquetValueReaders.PrimitiveReader;
import com.netflix.iceberg.parquet.ParquetValueReaders.RepeatedKeyValueReader;
import com.netflix.iceberg.parquet.ParquetValueReaders.RepeatedReader;
import com.netflix.iceberg.parquet.ParquetValueReaders.ReusableEntry;
import com.netflix.iceberg.parquet.ParquetValueReaders.StructReader;
import com.netflix.iceberg.parquet.ParquetValueReaders.UnboxedReader;
import com.netflix.iceberg.parquet.TypeWithSchemaVisitor;
import com.netflix.iceberg.types.Type.TypeID;
import com.netflix.iceberg.types.Types;
import org.apache.parquet.column.ColumnDescriptor;
import org.apache.parquet.io.api.Binary;
import org.apache.parquet.schema.DecimalMetadata;
import org.apache.parquet.schema.GroupType;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.PrimitiveType;
import org.apache.parquet.schema.Type;
import org.apache.spark.sql.catalyst.InternalRow;
import org.apache.spark.sql.catalyst.expressions.GenericInternalRow;
import org.apache.spark.sql.catalyst.util.ArrayBasedMapData;
import org.apache.spark.sql.catalyst.util.ArrayData;
import org.apache.spark.sql.catalyst.util.GenericArrayData;
import org.apache.spark.sql.catalyst.util.MapData;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.Decimal;
import org.apache.spark.unsafe.types.CalendarInterval;
import org.apache.spark.unsafe.types.UTF8String;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static com.netflix.iceberg.parquet.ParquetSchemaUtil.hasIds;
import static com.netflix.iceberg.parquet.ParquetValueReaders.option;
public class SparkParquetReaders {
// private constructor: static factory class, not meant to be instantiated
private SparkParquetReaders() {
}
/**
 * Builds a reader tree that decodes Parquet data into Spark InternalRows.
 *
 * @param expectedSchema the Iceberg schema to produce
 * @param fileSchema the Parquet file's schema
 * @return a reader producing rows matching the expected schema
 */
@SuppressWarnings("unchecked")
public static ParquetValueReader<InternalRow> buildReader(Schema expectedSchema,
                                                          MessageType fileSchema) {
  // files written with field IDs are matched by ID; files without IDs fall
  // back to position-based matching of the top-level columns
  final ReadBuilder builder = hasIds(fileSchema)
      ? new ReadBuilder(fileSchema)
      : new FallbackReadBuilder(fileSchema);
  return (ParquetValueReader<InternalRow>)
      TypeWithSchemaVisitor.visit(expectedSchema.asStruct(), fileSchema, builder);
}
/**
 * Read builder for files without field IDs: the top level is matched against
 * the expected schema, but nested structs are matched by position.
 */
private static class FallbackReadBuilder extends ReadBuilder {
  FallbackReadBuilder(MessageType type) {
    super(type);
  }

  @Override
  public ParquetValueReader<?> message(Types.StructType expected, MessageType message, List<ParquetValueReader<?>> fieldReaders) {
    // the top level matches by ID, but the remaining IDs are missing
    return super.struct(expected, message, fieldReaders);
  }

  @Override
  public ParquetValueReader<?> struct(Types.StructType ignored, GroupType struct, List<ParquetValueReader<?>> fieldReaders) {
    // the expected struct is ignored because nested fields are never found
    // when the file has no field IDs; fields are kept in file order instead
    List<ParquetValueReader<?>> newFields = Lists.newArrayListWithExpectedSize(
        fieldReaders.size());
    List<Type> types = Lists.newArrayListWithExpectedSize(fieldReaders.size());
    List<Type> fields = struct.getFields();
    for (int i = 0; i < fields.size(); i += 1) {
      Type fieldType = fields.get(i);
      int fieldD = type.getMaxDefinitionLevel(path(fieldType.getName()))-1;
      newFields.add(option(fieldType, fieldD, fieldReaders.get(i)));
      types.add(fieldType);
    }
    return new InternalRowReader(types, newFields);
  }
}
/**
 * Parquet schema visitor that builds a tree of {@link ParquetValueReader}s
 * producing Spark internal types, matching file columns to the expected
 * Iceberg schema by field ID.
 */
private static class ReadBuilder extends TypeWithSchemaVisitor<ParquetValueReader<?>> {
  // the full Parquet file schema, used to look up definition/repetition levels
  protected final MessageType type;

  ReadBuilder(MessageType type) {
    this.type = type;
  }

  @Override
  public ParquetValueReader<?> message(Types.StructType expected, MessageType message,
                                       List<ParquetValueReader<?>> fieldReaders) {
    // a message is read exactly like a struct
    return struct(expected, message.asGroupType(), fieldReaders);
  }

  @Override
  public ParquetValueReader<?> struct(Types.StructType expected, GroupType struct,
                                      List<ParquetValueReader<?>> fieldReaders) {
    // match the expected struct's order
    Map<Integer, ParquetValueReader<?>> readersById = Maps.newHashMap();
    Map<Integer, Type> typesById = Maps.newHashMap();
    List<Type> fields = struct.getFields();
    for (int i = 0; i < fields.size(); i += 1) {
      Type fieldType = fields.get(i);
      // NOTE(review): the -1 appears to make the level relative to this
      // struct rather than the message root — confirm
      int fieldD = type.getMaxDefinitionLevel(path(fieldType.getName()))-1;
      int id = fieldType.getId().intValue();
      readersById.put(id, option(fieldType, fieldD, fieldReaders.get(i)));
      typesById.put(id, fieldType);
    }

    // reorder readers to match the expected schema; expected fields missing
    // from the file are read as nulls
    List<Types.NestedField> expectedFields = expected != null ?
        expected.fields() : ImmutableList.of();
    List<ParquetValueReader<?>> reorderedFields = Lists.newArrayListWithExpectedSize(
        expectedFields.size());
    List<Type> types = Lists.newArrayListWithExpectedSize(expectedFields.size());
    for (Types.NestedField field : expectedFields) {
      int id = field.fieldId();
      ParquetValueReader<?> reader = readersById.get(id);
      if (reader != null) {
        reorderedFields.add(reader);
        types.add(typesById.get(id));
      } else {
        reorderedFields.add(ParquetValueReaders.nulls());
        types.add(null);
      }
    }

    return new InternalRowReader(types, reorderedFields);
  }

  @Override
  public ParquetValueReader<?> list(Types.ListType expectedList, GroupType array,
                                    ParquetValueReader<?> elementReader) {
    // three-level list layout: array -> repeated group -> element
    GroupType repeated = array.getFields().get(0).asGroupType();
    String[] repeatedPath = currentPath();

    int repeatedD = type.getMaxDefinitionLevel(repeatedPath)-1;
    int repeatedR = type.getMaxRepetitionLevel(repeatedPath)-1;

    Type elementType = repeated.getType(0);
    int elementD = type.getMaxDefinitionLevel(path(elementType.getName()))-1;

    return new ArrayReader<>(repeatedD, repeatedR, option(elementType, elementD, elementReader));
  }

  @Override
  public ParquetValueReader<?> map(Types.MapType expectedMap, GroupType map,
                                   ParquetValueReader<?> keyReader,
                                   ParquetValueReader<?> valueReader) {
    // map layout: map -> repeated key_value group -> (key, value)
    GroupType repeatedKeyValue = map.getFields().get(0).asGroupType();
    String[] repeatedPath = currentPath();

    int repeatedD = type.getMaxDefinitionLevel(repeatedPath)-1;
    int repeatedR = type.getMaxRepetitionLevel(repeatedPath)-1;

    Type keyType = repeatedKeyValue.getType(0);
    int keyD = type.getMaxDefinitionLevel(path(keyType.getName()))-1;
    Type valueType = repeatedKeyValue.getType(1);
    int valueD = type.getMaxDefinitionLevel(path(valueType.getName()))-1;

    return new MapReader<>(repeatedD, repeatedR,
        option(keyType, keyD, keyReader), option(valueType, valueD, valueReader));
  }

  @Override
  public ParquetValueReader<?> primitive(com.netflix.iceberg.types.Type.PrimitiveType expected,
                                         PrimitiveType primitive) {
    ColumnDescriptor desc = type.getColumnDescription(currentPath());

    // logical (original) types first: they refine the physical type
    if (primitive.getOriginalType() != null) {
      switch (primitive.getOriginalType()) {
        case ENUM:
        case JSON:
        case UTF8:
          return new StringReader(desc);
        case INT_8:
        case INT_16:
        case INT_32:
          // promote int columns to long when the expected type is long
          if (expected != null && expected.typeId() == Types.LongType.get().typeId()) {
            return new IntAsLongReader(desc);
          } else {
            return new UnboxedReader(desc);
          }
        case DATE:
        case INT_64:
        case TIMESTAMP_MICROS:
          return new UnboxedReader<>(desc);
        case TIMESTAMP_MILLIS:
          // converts stored millis to the micros Spark expects
          return new TimestampMillisReader(desc);
        case DECIMAL:
          DecimalMetadata decimal = primitive.getDecimalMetadata();
          switch (primitive.getPrimitiveTypeName()) {
            case BINARY:
            case FIXED_LEN_BYTE_ARRAY:
              return new BinaryDecimalReader(desc, decimal.getScale());
            case INT64:
              return new LongDecimalReader(desc, decimal.getPrecision(), decimal.getScale());
            case INT32:
              return new IntegerDecimalReader(desc, decimal.getPrecision(), decimal.getScale());
            default:
              throw new UnsupportedOperationException(
                  "Unsupported base type for decimal: " + primitive.getPrimitiveTypeName());
          }
        case BSON:
          return new BytesReader(desc);
        default:
          throw new UnsupportedOperationException(
              "Unsupported logical type: " + primitive.getOriginalType());
      }
    }

    // no logical type: dispatch on the physical type
    switch (primitive.getPrimitiveTypeName()) {
      case FIXED_LEN_BYTE_ARRAY:
      case BINARY:
        return new BytesReader(desc);
      case INT32:
        if (expected != null && expected.typeId() == TypeID.LONG) {
          return new IntAsLongReader(desc);
        } else {
          return new UnboxedReader<>(desc);
        }
      case FLOAT:
        // promote float columns to double when the expected type is double
        if (expected != null && expected.typeId() == TypeID.DOUBLE) {
          return new FloatAsDoubleReader(desc);
        } else {
          return new UnboxedReader<>(desc);
        }
      case BOOLEAN:
      case INT64:
      case DOUBLE:
        return new UnboxedReader<>(desc);
      default:
        throw new UnsupportedOperationException("Unsupported type: " + primitive);
    }
  }

  // returns the current field path; fieldNames (from the visitor) is iterated
  // in reverse to produce root-first order
  private String[] currentPath() {
    String[] path = new String[fieldNames.size()];
    if (!fieldNames.isEmpty()) {
      Iterator<String> iter = fieldNames.descendingIterator();
      for (int i = 0; iter.hasNext(); i += 1) {
        path[i] = iter.next();
      }
    }
    return path;
  }

  // returns the current path extended with one more field name
  protected String[] path(String name) {
    String[] path = new String[fieldNames.size() + 1];
    path[fieldNames.size()] = name;
    if (!fieldNames.isEmpty()) {
      Iterator<String> iter = fieldNames.descendingIterator();
      for (int i = 0; iter.hasNext(); i += 1) {
        path[i] = iter.next();
      }
    }
    return path;
  }
}
/** Reads decimals stored as BINARY/FIXED_LEN_BYTE_ARRAY unscaled big-endian bytes. */
private static class BinaryDecimalReader extends PrimitiveReader<Decimal> {
  private final int scale;

  BinaryDecimalReader(ColumnDescriptor desc, int scale) {
    super(desc);
    this.scale = scale;
  }

  @Override
  public Decimal read(Decimal ignored) {
    // the bytes hold the two's-complement unscaled value
    Binary binary = column.nextBinary();
    BigDecimal value = new BigDecimal(new BigInteger(binary.getBytes()), scale);
    return Decimal.fromDecimal(value);
  }
}
/** Reads decimals stored as INT32 unscaled values. */
private static class IntegerDecimalReader extends PrimitiveReader<Decimal> {
  private final int precision;
  private final int scale;

  IntegerDecimalReader(ColumnDescriptor desc, int precision, int scale) {
    super(desc);
    this.precision = precision;
    this.scale = scale;
  }

  @Override
  public Decimal read(Decimal ignored) {
    // the column stores the unscaled value; apply reattaches precision/scale
    long unscaled = column.nextInteger();
    return Decimal.apply(unscaled, precision, scale);
  }
}
/** Reads decimals stored as INT64 unscaled values. */
private static class LongDecimalReader extends PrimitiveReader<Decimal> {
  private final int precision;
  private final int scale;

  LongDecimalReader(ColumnDescriptor desc, int precision, int scale) {
    super(desc);
    this.precision = precision;
    this.scale = scale;
  }

  @Override
  public Decimal read(Decimal ignored) {
    // the column stores the unscaled value; apply reattaches precision/scale
    long unscaled = column.nextLong();
    return Decimal.apply(unscaled, precision, scale);
  }
}
/** Reads TIMESTAMP_MILLIS columns, converting milliseconds to microseconds. */
private static class TimestampMillisReader extends UnboxedReader<Long> {
  TimestampMillisReader(ColumnDescriptor desc) {
    super(desc);
  }

  @Override
  public Long read(Long ignored) {
    return readLong();
  }

  @Override
  public long readLong() {
    // scale stored milliseconds up to microseconds
    return column.nextLong() * 1000;
  }
}
/**
 * Reads Parquet binary values as Spark {@link UTF8String}s, wrapping the
 * backing array directly when the buffer exposes one to avoid a copy.
 */
private static class StringReader extends PrimitiveReader<UTF8String> {
  StringReader(ColumnDescriptor desc) {
    super(desc);
  }

  @Override
  public UTF8String read(UTF8String ignored) {
    Binary binary = column.nextBinary();
    ByteBuffer buffer = binary.toByteBuffer();
    if (!buffer.hasArray()) {
      // Direct (or otherwise array-less) buffer: materialize the bytes.
      return UTF8String.fromBytes(binary.getBytes());
    }
    int start = buffer.arrayOffset() + buffer.position();
    return UTF8String.fromBytes(buffer.array(), start, buffer.remaining());
  }
}
/** Reads Parquet binary values as plain byte arrays (always copies). */
private static class BytesReader extends PrimitiveReader<byte[]> {
BytesReader(ColumnDescriptor desc) {
super(desc);
}
@Override
public byte[] read(byte[] ignored) {
return column.nextBinary().getBytes();
}
}
/**
 * Reads repeated Parquet values into a reusable Spark {@link ArrayData}.
 *
 * Reuse protocol: {@code readPos} walks previously materialized elements so
 * the element reader can recycle them; {@code writePos} tracks where new
 * elements land. Both are reset per list in {@link #newListData}.
 */
private static class ArrayReader<E> extends RepeatedReader<ArrayData, ReusableArrayData, E> {
private int readPos = 0;
private int writePos = 0;
ArrayReader(int definitionLevel, int repetitionLevel, ParquetValueReader<E> reader) {
super(definitionLevel, repetitionLevel, reader);
}
@Override
@SuppressWarnings("unchecked")
protected ReusableArrayData newListData(ArrayData reuse) {
// Reset cursors for this list, then reuse the container when possible.
this.readPos = 0;
this.writePos = 0;
if (reuse instanceof ReusableArrayData) {
return (ReusableArrayData) reuse;
} else {
return new ReusableArrayData();
}
}
@Override
@SuppressWarnings("unchecked")
protected E getElement(ReusableArrayData list) {
// Hand back a previously materialized element for reuse, or null once
// past the container's capacity.
E value = null;
if (readPos < list.capacity()) {
value = (E) list.values[readPos];
}
readPos += 1;
return value;
}
@Override
protected void addElement(ReusableArrayData reused, E element) {
if (writePos >= reused.capacity()) {
reused.grow();
}
reused.values[writePos] = element;
writePos += 1;
}
@Override
protected ArrayData buildList(ReusableArrayData list) {
// writePos is the number of elements actually written for this list.
list.setNumElements(writePos);
return list;
}
}
/**
 * Reads repeated Parquet key/value pairs into a reusable Spark {@link MapData}.
 *
 * Mirrors {@code ArrayReader}'s reuse protocol: {@code readPos} recycles
 * previously materialized pairs, {@code writePos} appends new ones. A single
 * mutable {@code entry} is reused for every yielded pair; {@code nullEntry}
 * stands in once capacity is exceeded.
 */
private static class MapReader<K, V> extends RepeatedKeyValueReader<MapData, ReusableMapData, K, V> {
private int readPos = 0;
private int writePos = 0;
private final ReusableEntry<K, V> entry = new ReusableEntry<>();
private final ReusableEntry<K, V> nullEntry = new ReusableEntry<>();
MapReader(int definitionLevel, int repetitionLevel,
ParquetValueReader<K> keyReader, ParquetValueReader<V> valueReader) {
super(definitionLevel, repetitionLevel, keyReader, valueReader);
}
@Override
@SuppressWarnings("unchecked")
protected ReusableMapData newMapData(MapData reuse) {
// Reset cursors for this map, then reuse the container when possible.
this.readPos = 0;
this.writePos = 0;
if (reuse instanceof ReusableMapData) {
return (ReusableMapData) reuse;
} else {
return new ReusableMapData();
}
}
@Override
@SuppressWarnings("unchecked")
protected Map.Entry<K, V> getPair(ReusableMapData map) {
Map.Entry<K, V> kv = nullEntry;
if (readPos < map.capacity()) {
entry.set((K) map.keys.values[readPos], (V) map.values.values[readPos]);
kv = entry;
}
readPos += 1;
return kv;
}
@Override
protected void addPair(ReusableMapData map, K key, V value) {
// Growing the map grows the key and value arrays in lockstep.
if (writePos >= map.capacity()) {
map.grow();
}
map.keys.values[writePos] = key;
map.values.values[writePos] = value;
writePos += 1;
}
@Override
protected MapData buildMap(ReusableMapData map) {
// writePos is the number of pairs actually written for this map.
map.setNumElements(writePos);
return map;
}
}
/**
 * Reads Parquet struct values into Spark {@link GenericInternalRow}s,
 * reusing the previous row instance when one of the right type is supplied.
 * The typed setters avoid boxing for primitive fields.
 */
private static class InternalRowReader extends StructReader<InternalRow, GenericInternalRow> {
private final int numFields;
InternalRowReader(List<Type> types, List<ParquetValueReader<?>> readers) {
super(types, readers);
this.numFields = readers.size();
}
@Override
protected GenericInternalRow newStructData(InternalRow reuse) {
// Reuse the incoming row if compatible; otherwise allocate a fresh one.
if (reuse instanceof GenericInternalRow) {
return (GenericInternalRow) reuse;
} else {
return new GenericInternalRow(numFields);
}
}
@Override
protected Object getField(GenericInternalRow intermediate, int pos) {
return intermediate.genericGet(pos);
}
@Override
protected InternalRow buildStruct(GenericInternalRow struct) {
// The intermediate row is the final result; no copy is made.
return struct;
}
@Override
protected void set(GenericInternalRow row, int pos, Object value) {
row.update(pos, value);
}
@Override
protected void setNull(GenericInternalRow row, int pos) {
row.setNullAt(pos);
}
@Override
protected void setBoolean(GenericInternalRow row, int pos, boolean value) {
row.setBoolean(pos, value);
}
@Override
protected void setInteger(GenericInternalRow row, int pos, int value) {
row.setInt(pos, value);
}
@Override
protected void setLong(GenericInternalRow row, int pos, long value) {
row.setLong(pos, value);
}
@Override
protected void setFloat(GenericInternalRow row, int pos, float value) {
row.setFloat(pos, value);
}
@Override
protected void setDouble(GenericInternalRow row, int pos, double value) {
row.setDouble(pos, value);
}
}
/**
 * Mutable, reusable {@link MapData} backed by parallel key/value arrays.
 * Capacity is managed jointly so keys and values always stay the same length.
 */
private static class ReusableMapData extends MapData {
private final ReusableArrayData keys;
private final ReusableArrayData values;
private int numElements;
private ReusableMapData() {
this.keys = new ReusableArrayData();
this.values = new ReusableArrayData();
}
private void grow() {
// Grow both sides in lockstep to keep them index-aligned.
keys.grow();
values.grow();
}
private int capacity() {
// keys and values always share the same capacity.
return keys.capacity();
}
public void setNumElements(int numElements) {
this.numElements = numElements;
keys.setNumElements(numElements);
values.setNumElements(numElements);
}
@Override
public int numElements() {
return numElements;
}
@Override
public MapData copy() {
// Deep-copies the backing arrays into an immutable snapshot.
return new ArrayBasedMapData(keyArray().copy(), valueArray().copy());
}
@Override
public ReusableArrayData keyArray() {
return keys;
}
@Override
public ReusableArrayData valueArray() {
return values;
}
}
/**
 * Mutable, reusable {@link ArrayData}. Slots beyond {@code numElements} may
 * still hold previously materialized values that the readers recycle, so
 * {@link #grow()} always preserves existing contents.
 */
private static class ReusableArrayData extends ArrayData {
  private static final Object[] EMPTY = new Object[0];

  private Object[] values = EMPTY;
  private int numElements = 0;

  /** Enlarges the backing array while keeping all current contents. */
  private void grow() {
    if (values.length == 0) {
      this.values = new Object[20];
    } else {
      Object[] old = values;
      // Double the capacity. The previous code quadrupled (old.length << 2),
      // which over-allocates large arrays without improving amortized cost.
      this.values = new Object[old.length << 1];
      // copy the old array in case it has values that can be reused
      System.arraycopy(old, 0, values, 0, old.length);
    }
  }

  private int capacity() {
    return values.length;
  }

  public void setNumElements(int numElements) {
    this.numElements = numElements;
  }

  @Override
  public Object get(int ordinal, DataType dataType) {
    // dataType is ignored; values are stored as already-converted objects.
    return values[ordinal];
  }

  @Override
  public int numElements() {
    return numElements;
  }

  @Override
  public ArrayData copy() {
    return new GenericArrayData(array());
  }

  @Override
  public Object[] array() {
    return Arrays.copyOfRange(values, 0, numElements);
  }

  // Not annotated @Override: these mirror mutable ArrayData methods that are
  // only declared in some Spark versions; readers call them directly.
  public void setNullAt(int i) {
    values[i] = null;
  }

  public void update(int ordinal, Object value) {
    values[ordinal] = value;
  }

  @Override
  public boolean isNullAt(int ordinal) {
    return null == values[ordinal];
  }

  @Override
  public boolean getBoolean(int ordinal) {
    return (boolean) values[ordinal];
  }

  @Override
  public byte getByte(int ordinal) {
    return (byte) values[ordinal];
  }

  @Override
  public short getShort(int ordinal) {
    return (short) values[ordinal];
  }

  @Override
  public int getInt(int ordinal) {
    return (int) values[ordinal];
  }

  @Override
  public long getLong(int ordinal) {
    return (long) values[ordinal];
  }

  @Override
  public float getFloat(int ordinal) {
    return (float) values[ordinal];
  }

  @Override
  public double getDouble(int ordinal) {
    return (double) values[ordinal];
  }

  @Override
  public Decimal getDecimal(int ordinal, int precision, int scale) {
    return (Decimal) values[ordinal];
  }

  @Override
  public UTF8String getUTF8String(int ordinal) {
    return (UTF8String) values[ordinal];
  }

  @Override
  public byte[] getBinary(int ordinal) {
    return (byte[]) values[ordinal];
  }

  @Override
  public CalendarInterval getInterval(int ordinal) {
    return (CalendarInterval) values[ordinal];
  }

  @Override
  public InternalRow getStruct(int ordinal, int numFields) {
    return (InternalRow) values[ordinal];
  }

  @Override
  public ArrayData getArray(int ordinal) {
    return (ArrayData) values[ordinal];
  }

  @Override
  public MapData getMap(int ordinal) {
    return (MapData) values[ordinal];
  }
}
}
/********************************************************************************
* Copyright (c) 2020 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.hda.impl;
import org.eclipse.jifa.common.util.EscapeUtil;
import org.eclipse.mat.SnapshotException;
import org.eclipse.mat.query.Bytes;
import org.eclipse.mat.query.IContextObjectSet;
import org.eclipse.mat.query.IStructuredResult;
import org.eclipse.mat.snapshot.ISnapshot;
import org.eclipse.jifa.common.util.UseAccessor;
import org.eclipse.jifa.hda.api.AnalysisException;
import static org.eclipse.jifa.hda.api.Model.DominatorTree;
@UseAccessor
public class VirtualClassItem extends DominatorTree.ClassItem {
static final int COLUMN_LABEL = 0;
static final int COLUMN_OBJECTS = 1;
static final int COLUMN_SHALLOW = 2;
static final int COLUMN_RETAINED = 3;
static final int COLUMN_PERCENT = 4;
transient final ISnapshot snapshot;
transient final IStructuredResult results;
transient final Object e;
public VirtualClassItem(final ISnapshot snapshot, final IStructuredResult results, final Object e) {
this.snapshot = snapshot;
this.results = results;
this.e = e;
this.objectId = results.getContext(e).getObjectId();
}
@Override
public String getSuffix() {
return null;
}
@Override
public int getObjectId() {
return objectId;
}
@Override
public int getObjectType() {
try {
return HeapDumpAnalyzerImpl.typeOf(snapshot.getObject(objectId));
} catch (SnapshotException se) {
throw new AnalysisException(se);
}
}
@Override
public boolean isGCRoot() {
return snapshot.isGCRoot(objectId);
}
@Override
public String getLabel() {
return EscapeUtil.unescapeLabel((String) results.getColumnValue(e, COLUMN_LABEL));
}
@Override
public int getObjects() {
return (Integer) results.getColumnValue(e, COLUMN_OBJECTS);
}
@Override
public int[] getObjectIds() {
return ((IContextObjectSet) results.getContext(e)).getObjectIds();
}
@Override
public long getShallowSize() {
return ((Bytes) results.getColumnValue(e, COLUMN_SHALLOW)).getValue();
}
@Override
public long getRetainedSize() {
return ((Bytes) results.getColumnValue(e, COLUMN_RETAINED)).getValue();
}
@Override
public double getPercent() {
return (Double) results.getColumnValue(e, COLUMN_PERCENT);
}
} | 6,620 |
/********************************************************************************
* Copyright (c) 2020 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.hda.impl;
import org.eclipse.jifa.common.util.EscapeUtil;
import org.eclipse.mat.SnapshotException;
import org.eclipse.mat.query.Bytes;
import org.eclipse.mat.query.IStructuredResult;
import org.eclipse.mat.snapshot.ISnapshot;
import org.eclipse.mat.snapshot.model.IObject;
import org.eclipse.jifa.common.util.UseAccessor;
import org.eclipse.jifa.hda.api.AnalysisException;
import static org.eclipse.jifa.hda.api.Model.DominatorTree;
@UseAccessor
public class VirtualDefaultItem extends DominatorTree.DefaultItem {
static final int COLUMN_LABEL = 0;
static final int COLUMN_SHALLOW = 1;
static final int COLUMN_RETAINED = 2;
static final int COLUMN_PERCENT = 3;
transient final ISnapshot snapshot;
transient final IStructuredResult results;
transient final Object e;
public VirtualDefaultItem(final ISnapshot snapshot, final IStructuredResult results, final Object e) {
this.snapshot = snapshot;
this.results = results;
this.e = e;
this.objectId = results.getContext(e).getObjectId();
}
@Override
public String getSuffix() {
try {
IObject object = snapshot.getObject(objectId);
return Helper.suffix(object.getGCRootInfo());
} catch (SnapshotException se) {
throw new AnalysisException(se);
}
}
@Override
public int getObjectId() {
return objectId;
}
@Override
public int getObjectType() {
try {
return HeapDumpAnalyzerImpl.typeOf(snapshot.getObject(objectId));
} catch (SnapshotException se) {
throw new AnalysisException(se);
}
}
@Override
public boolean isGCRoot() {
return snapshot.isGCRoot(objectId);
}
@Override
public String getLabel() {
return EscapeUtil.unescapeLabel((String) results.getColumnValue(e, COLUMN_LABEL));
}
@Override
public long getShallowSize() {
return ((Bytes) results.getColumnValue(e, COLUMN_SHALLOW)).getValue();
}
@Override
public long getRetainedSize() {
return ((Bytes) results.getColumnValue(e, COLUMN_RETAINED)).getValue();
}
@Override
public double getPercent() {
return (Double) results.getColumnValue(e, COLUMN_PERCENT);
}
} | 6,621 |
/********************************************************************************
* Copyright (c) 2021 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.hda.impl;
import org.eclipse.jifa.hda.api.Model;
import org.eclipse.mat.query.IResult;
import org.eclipse.mat.query.IResultTree;
import org.eclipse.mat.query.refined.RefinedTable;
import org.eclipse.mat.snapshot.ISnapshot;
import java.lang.ref.SoftReference;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
/**
 * Per-snapshot analysis state. Expensive derived results (class-loader
 * explorer, direct-byte-buffer query, leak report) are cached behind
 * {@link SoftReference}s so they can be dropped under memory pressure and
 * recomputed on demand.
 */
public class AnalysisContext {
final ISnapshot snapshot;
// Soft caches: volatile for safe publication across request threads; the
// referent may be collected at any time, so callers must handle null.
volatile SoftReference<ClassLoaderExplorerData> classLoaderExplorerData = new SoftReference<>(null);
volatile SoftReference<DirectByteBufferData> directByteBufferData = new SoftReference<>(null);
volatile SoftReference<LeakReportData> leakReportData= new SoftReference<>(null);
AnalysisContext(ISnapshot snapshot) {
this.snapshot = snapshot;
}
/** Cached result of the class-loader explorer query. */
static class ClassLoaderExplorerData {
IResultTree result;
// classloader object Id -> record
Map<Integer, Object> classLoaderIdMap;
List<?> items;
int definedClasses;
int numberOfInstances;
}
/** Cached result of the DirectByteBuffer OQL query plus typed column access. */
static class DirectByteBufferData {
static final String OQL =
"SELECT s.@displayName as label, s.position as position, s.limit as limit, s.capacity as " +
"capacity FROM java.nio.DirectByteBuffer s where s.cleaner != null";
static final Map<String, Object> ARGS = new HashMap<>(1);
static {
ARGS.put("queryString", OQL);
}
RefinedTable resultContext;
Model.DirectByteBuffer.Summary summary;
// Column accessors; indices match the SELECT list order in OQL above.
public String label(Object row) {
return (String) resultContext.getColumnValue(row, 0);
}
public int position(Object row) {
return (Integer) resultContext.getColumnValue(row, 1);
}
public int limit(Object row) {
return (Integer) resultContext.getColumnValue(row, 2);
}
public int capacity(Object row) {
return (Integer) resultContext.getColumnValue(row, 3);
}
}
/** Cached leak-suspects report. */
static class LeakReportData {
IResult result;
}
// Identity is defined by the underlying snapshot only; the caches are
// deliberately excluded from equals/hashCode.
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
AnalysisContext that = (AnalysisContext) o;
return Objects.equals(snapshot, that.snapshot);
}
@Override
public int hashCode() {
return Objects.hash(snapshot);
}
}
/********************************************************************************
* Copyright (c) 2020, 2021 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.hda.impl;
import org.eclipse.mat.query.IResultTree;
import java.util.List;
import java.util.function.Function;
import static org.eclipse.jifa.common.util.Assertion.ASSERT;
// Finds elements in MAT result trees whose node identity must be resolved
// through caller-supplied callbacks, because the tree API exposes no uniform
// way to match nodes or enumerate children.
/**
 * Searches an {@link IResultTree} for a node with a given id and exposes its
 * children. Node-id extraction and child enumeration are pluggable because
 * different MAT result trees expose them differently.
 *
 * Lookups are best-effort: any exception during traversal is reported to
 * stderr and treated as "not found" (null result), matching the original
 * contract callers rely on.
 */
public class ExoticTreeFinder {
    private final IResultTree tree;
    // Maps (tree, node) -> node id; returns null when the node has no id.
    private BinFunction<IResultTree, Object, Integer> predicate;
    // Enumerates a node's children; returns null when the node is a leaf.
    private Function<Object, List<?>> getChildrenCallback;

    public ExoticTreeFinder(IResultTree tree) {
        ASSERT.notNull(tree);
        this.tree = tree;
    }

    public ExoticTreeFinder setGetChildrenCallback(Function<Object, List<?>> getChildrenCallback) {
        this.getChildrenCallback = getChildrenCallback;
        return this;
    }

    public ExoticTreeFinder setPredicate(BinFunction<IResultTree, Object, Integer> predicate) {
        this.predicate = predicate;
        return this;
    }

    /**
     * Returns the children of the node with the given id, or null if the node
     * is absent, has no children, or the traversal fails.
     */
    public List<?> findChildrenOf(int parentNodeId) {
        // Delegate to findTargetParentNode instead of duplicating the
        // traversal-with-catch logic.
        Object targetParentNode = findTargetParentNode(parentNodeId);
        if (targetParentNode != null) {
            return getChildrenCallback.apply(targetParentNode);
        }
        return null;
    }

    /** Returns the node with the given id, or null if absent or on failure. */
    public Object findTargetParentNode(int parentNodeId) {
        try {
            return findTargetParentNodeImpl(tree.getElements(), parentNodeId);
        } catch (Exception e) {
            // Best-effort: log and fall through to "not found".
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Breadth-first-ish search: check all nodes at this level first, then
     * recurse into each node's children.
     */
    private Object findTargetParentNodeImpl(List<?> nodes, int parentNodeId) throws Exception {
        if (nodes == null) {
            return null;
        }
        for (Object node : nodes) {
            Integer nodeId = predicate.apply(tree, node);
            if (nodeId != null && nodeId == parentNodeId) {
                return node;
            }
        }
        for (Object node : nodes) {
            List<?> children = getChildrenCallback.apply(node);
            if (children != null) {
                Object targetParentNode = findTargetParentNodeImpl(children, parentNodeId);
                if (targetParentNode != null) {
                    return targetParentNode;
                }
            }
        }
        return null;
    }

    /** Two-argument function that may throw; used for node-id extraction. */
    public interface BinFunction<A, B, R> {
        R apply(A a, B b) throws Exception;
    }
}
/********************************************************************************
* Copyright (c) 2020, 2021 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.hda.impl;
import org.eclipse.mat.SnapshotException;
import org.eclipse.mat.query.IContextObject;
import org.eclipse.mat.query.IResultTree;
import org.eclipse.mat.snapshot.ISnapshot;
import org.eclipse.mat.snapshot.model.GCRootInfo;
import org.eclipse.mat.snapshot.model.IObject;
import org.eclipse.mat.snapshot.model.NamedReference;
import org.eclipse.mat.snapshot.query.IHeapObjectArgument;
import org.eclipse.mat.util.IProgressListener;
import org.eclipse.mat.util.VoidProgressListener;
import java.util.Iterator;
import java.util.List;
import static org.eclipse.jifa.common.Constant.EMPTY_STRING;
import static org.eclipse.jifa.common.util.Assertion.ASSERT;
/** Static utilities shared by the heap-dump analyzer implementation. */
public class Helper {
// Sentinel returned when a context has no object id.
public static final int ILLEGAL_OBJECT_ID = -1;
public static IProgressListener VOID_LISTENER = new VoidProgressListener();
/** Object id from a context, or {@link #ILLEGAL_OBJECT_ID} if context is null. */
public static int fetchObjectId(IContextObject context) {
return context == null ? ILLEGAL_OBJECT_ID : context.getObjectId();
}
/** GC-root type names for the object, or "" if it is not a GC root. */
public static String suffix(ISnapshot snapshot, int objectId) throws SnapshotException {
GCRootInfo[] gc = snapshot.getGCRootInfo(objectId);
return gc != null ? GCRootInfo.getTypeSetAsString(gc) : EMPTY_STRING;
}
/** GC-root type names from pre-fetched info, or "" if info is null. */
public static String suffix(GCRootInfo[] gcRootInfo) {
return gcRootInfo != null ? GCRootInfo.getTypeSetAsString(gcRootInfo) : EMPTY_STRING;
}
/**
 * Comma-separated names of all outbound references from {@code objectId}
 * that point at {@code outbound}; "" if none match.
 */
public static String prefix(ISnapshot snapshot, int objectId, int outbound) throws SnapshotException {
IObject object = snapshot.getObject(objectId);
long address = snapshot.mapIdToAddress(outbound);
StringBuilder s = new StringBuilder(64);
List<NamedReference> refs = object.getOutboundReferences();
for (NamedReference reference : refs) {
if (reference.getObjectAddress() == address) {
if (s.length() > 0) {
s.append(", ");
}
s.append(reference.getName());
}
}
return s.toString();
}
/**
 * Wraps a fixed id array as an {@link IHeapObjectArgument} whose iterator
 * yields the whole array exactly once.
 */
public static IHeapObjectArgument buildHeapObjectArgument(int[] ids) {
return new IHeapObjectArgument() {
@Override
public int[] getIds(IProgressListener iProgressListener) {
return ids;
}
@Override
public String getLabel() {
return "";
}
@Override
public Iterator<int[]> iterator() {
return new Iterator<int[]>() {
// Single-shot iterator: flips to false after the first next().
boolean hasNext = true;
@Override
public boolean hasNext() {
return hasNext;
}
@Override
public int[] next() {
ASSERT.isTrue(hasNext);
hasNext = false;
return ids;
}
};
}
};
}
// Linear scan of one tree level for the node whose context has targetId.
private static Object findObjectInTree(IResultTree tree, List<?> levelElements, int targetId) {
if (levelElements != null) {
for (Object o : levelElements) {
if (tree.getContext(o).getObjectId() == targetId) {
return o;
}
}
}
return null;
}
/**
 * Walks {@code tree} along the given object-id path (root id first) and
 * returns the node at the end of the path, or null if any hop is missing.
 */
public static Object fetchObjectInResultTree(IResultTree tree, int[] idPathInResultTree) {
if (idPathInResultTree == null || idPathInResultTree.length == 0) {
return null;
}
// find the object in root tree
Object objectInTree = findObjectInTree(tree, tree.getElements(), idPathInResultTree[0]);
// find the object in children tree
for (int i = 1; i < idPathInResultTree.length; i++) {
if (objectInTree == null) {
return null;
}
objectInTree = findObjectInTree(tree, tree.getChildren(objectInTree), idPathInResultTree[i]);
}
return objectInTree;
}
}
/********************************************************************************
* Copyright (c) 2021 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.hda.impl;
import org.eclipse.mat.SnapshotException;
import org.eclipse.mat.query.Bytes;
import org.eclipse.mat.query.IResultTree;
import org.eclipse.mat.snapshot.ISnapshot;
import org.eclipse.mat.snapshot.model.IObject;
import org.eclipse.jifa.common.util.UseAccessor;
import org.eclipse.jifa.hda.api.AnalysisException;
import static org.eclipse.jifa.hda.api.Model.Thread;
@UseAccessor
public class VirtualThreadItem extends Thread.Item {
static final int COLUMN_OBJECT = 0;
static final int COLUMN_NAME = 1;
static final int COLUMN_SHALLOW = 2;
static final int COLUMN_RETAINED = 3;
static final int COLUMN_CONTEXT_CLASS_LOADER = 4;
// changes depending on MAT report results
final int COLUMN_DAEMON;
transient final IResultTree result;
transient final Object row;
public VirtualThreadItem(final IResultTree result, final Object row) {
this.row = row;
this.result = result;
this.objectId = result.getContext(row).getObjectId();
// the report changed a little in MAT:
// Bug 572596 Add maximum retained heap size to thread overview stack
// a row was injected at column position 5, so the daemon column may have been
// pushed out to column 6
boolean includesMaxLocalRetained = (result.getColumns().length == 10);
this.COLUMN_DAEMON = includesMaxLocalRetained ? 6 : 5;
}
@Override
public int getObjectId() {
return objectId;
}
@Override
public String getObject() {
return (String) result.getColumnValue(row, COLUMN_OBJECT);
}
@Override
public String getName() {
return (String) result.getColumnValue(row, COLUMN_NAME);
}
@Override
public long getShallowSize() {
return ((Bytes) result.getColumnValue(row, COLUMN_SHALLOW)).getValue();
}
@Override
public long getRetainedSize() {
return ((Bytes) result.getColumnValue(row, COLUMN_RETAINED)).getValue();
}
@Override
public String getContextClassLoader() {
return (String) result.getColumnValue(row, COLUMN_CONTEXT_CLASS_LOADER);
}
@Override
public boolean isHasStack() {
return (Boolean) result.hasChildren(row);
}
@Override
public boolean isDaemon() {
return (Boolean) result.getColumnValue(row, COLUMN_DAEMON);
}
} | 6,625 |
/********************************************************************************
* Copyright (c) 2021 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.hda.impl;
import org.eclipse.jifa.hda.api.HeapDumpAnalyzer;
import org.osgi.framework.BundleActivator;
import org.osgi.framework.BundleContext;
import java.util.Hashtable;
/**
 * OSGi entry point: publishes the heap-dump analyzer provider as a service
 * when the bundle starts. Nothing needs explicit cleanup on stop — the OSGi
 * framework unregisters the service automatically.
 */
public class Activator implements BundleActivator {
    @Override
    public void start(BundleContext bundleContext) {
        Hashtable<String, Object> properties = new Hashtable<>();
        bundleContext.registerService(
                HeapDumpAnalyzer.Provider.class, HeapDumpAnalyzerImpl.PROVIDER, properties);
    }

    @Override
    public void stop(BundleContext bundleContext) {
        // intentionally empty: service unregistration is handled by the framework
    }
}
/********************************************************************************
* Copyright (c) 2021, 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.hda.impl;
import org.eclipse.jifa.common.Constant;
import org.eclipse.jifa.common.JifaException;
import org.eclipse.jifa.common.cache.Cacheable;
import org.eclipse.jifa.common.listener.ProgressListener;
import org.eclipse.jifa.common.request.PagingRequest;
import org.eclipse.jifa.common.util.EscapeUtil;
import org.eclipse.jifa.common.util.PageViewBuilder;
import org.eclipse.jifa.common.util.ReflectionUtil;
import org.eclipse.jifa.common.vo.PageView;
import org.eclipse.jifa.common.vo.support.SearchPredicate;
import org.eclipse.jifa.common.vo.support.SearchType;
import org.eclipse.jifa.common.vo.support.SortTableGenerator;
import org.eclipse.jifa.hda.api.AnalysisException;
import org.eclipse.jifa.hda.api.HeapDumpAnalyzer;
import org.eclipse.jifa.hda.api.Model;
import org.eclipse.mat.SnapshotException;
import org.eclipse.mat.hprof.extension.HprofPreferencesAccess;
import org.eclipse.mat.hprof.ui.HprofPreferences;
import org.eclipse.mat.internal.snapshot.SnapshotQueryContext;
import org.eclipse.mat.parser.model.ClassImpl;
import org.eclipse.mat.parser.model.XClassHistogramRecord;
import org.eclipse.mat.parser.model.XClassLoaderHistogramRecord;
import org.eclipse.mat.query.Bytes;
import org.eclipse.mat.query.Column;
import org.eclipse.mat.query.IContextObject;
import org.eclipse.mat.query.IContextObjectSet;
import org.eclipse.mat.query.IDecorator;
import org.eclipse.mat.query.IIconProvider;
import org.eclipse.mat.query.IResult;
import org.eclipse.mat.query.IResultPie;
import org.eclipse.mat.query.IResultTable;
import org.eclipse.mat.query.IResultTree;
import org.eclipse.mat.query.refined.RefinedResultBuilder;
import org.eclipse.mat.query.refined.RefinedTable;
import org.eclipse.mat.query.refined.RefinedTree;
import org.eclipse.mat.query.results.CompositeResult;
import org.eclipse.mat.query.results.TextResult;
import org.eclipse.mat.report.QuerySpec;
import org.eclipse.mat.report.SectionSpec;
import org.eclipse.mat.report.Spec;
import org.eclipse.mat.snapshot.ClassHistogramRecord;
import org.eclipse.mat.snapshot.Histogram;
import org.eclipse.mat.snapshot.HistogramRecord;
import org.eclipse.mat.snapshot.IPathsFromGCRootsComputer;
import org.eclipse.mat.snapshot.ISnapshot;
import org.eclipse.mat.snapshot.SnapshotFactory;
import org.eclipse.mat.snapshot.SnapshotInfo;
import org.eclipse.mat.snapshot.UnreachableObjectsHistogram;
import org.eclipse.mat.snapshot.model.Field;
import org.eclipse.mat.snapshot.model.GCRootInfo;
import org.eclipse.mat.snapshot.model.IClass;
import org.eclipse.mat.snapshot.model.IClassLoader;
import org.eclipse.mat.snapshot.model.IInstance;
import org.eclipse.mat.snapshot.model.IObject;
import org.eclipse.mat.snapshot.model.IObjectArray;
import org.eclipse.mat.snapshot.model.IPrimitiveArray;
import org.eclipse.mat.snapshot.model.ObjectReference;
import org.eclipse.mat.snapshot.query.Icons;
import org.eclipse.mat.snapshot.query.SnapshotQuery;
import java.lang.ref.SoftReference;
import java.net.URL;
import java.nio.file.Path;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import static org.eclipse.jifa.common.listener.ProgressListener.NoOpProgressListener;
import static org.eclipse.jifa.common.vo.support.SearchPredicate.createPredicate;
import static org.eclipse.jifa.hda.api.Model.*;
import static org.eclipse.jifa.hda.impl.AnalysisContext.ClassLoaderExplorerData;
import static org.eclipse.jifa.hda.impl.AnalysisContext.DirectByteBufferData;
public class HeapDumpAnalyzerImpl implements HeapDumpAnalyzer {
// Singleton provider registered as an OSGi service by Activator.
static final Provider PROVIDER = new ProviderImpl();
// Per-snapshot state (snapshot handle plus soft-referenced result caches).
private final AnalysisContext context;
public HeapDumpAnalyzerImpl(AnalysisContext context) {
this.context = context;
}
/**
 * Classifies a heap object as class, class loader, array, or plain instance.
 * The IClass check precedes IClassLoader, matching the original ordering.
 */
public static int typeOf(IObject object) {
    if (object instanceof IClass) {
        return JavaObject.CLASS_TYPE;
    }
    if (object instanceof IClassLoader) {
        return JavaObject.CLASS_LOADER_TYPE;
    }
    return object.getClazz().isArrayType() ? JavaObject.ARRAY_TYPE : JavaObject.NORMAL_TYPE;
}
/**
 * Maps a MAT class-referrer icon to a {@code ClassReferrer.Type} constant.
 * Reference equality (==) is intentional: the Icons class exposes shared
 * URL constants, so identity comparison is both safe and cheap here.
 */
private static int getClassReferrerType(URL icon) {
if (icon == Icons.CLASS_IN || icon == Icons.CLASS_OUT) {
return ClassReferrer.Type.NEW;
} else if (icon == Icons.CLASS_IN_MIXED || icon == Icons.CLASS_OUT_MIXED) {
return ClassReferrer.Type.MIXED;
} else if (icon == Icons.CLASS_IN_OLD || icon == Icons.CLASS_OUT_OLD) {
return ClassReferrer.Type.OLD_FAD;
}
throw new AnalysisException("Should not reach here");
}
/**
 * Converts textual exclude entries into MAT's exclude map.
 *
 * Each entry has the form "class.name.pattern[:field1,field2,...]".
 * The pattern part is resolved (with subclasses) against the snapshot;
 * the optional field list restricts the exclusion to those fields.
 * A class mapped to a null field set means "exclude all fields".
 *
 * @return the exclude map, or null when {@code excludes} is null/empty
 */
private static Map<IClass, Set<String>> convert(AnalysisContext context,
                                                List<String> excludes) throws SnapshotException {
    Map<IClass, Set<String>> excludeMap = null;
    if (excludes != null && !excludes.isEmpty()) {
        excludeMap = new HashMap<>();
        for (String entry : excludes) {
            String pattern = entry;
            Set<String> fields = null;
            int colon = entry.indexOf(':');
            if (colon >= 0) {
                // Field names follow the colon, comma-separated.
                fields = new HashSet<>();
                StringTokenizer tokens = new StringTokenizer(entry.substring(colon + 1), ",");
                while (tokens.hasMoreTokens())
                    fields.add(tokens.nextToken());
                pattern = pattern.substring(0, colon);
            }
            // 'true' also matches subclasses of the pattern's classes.
            for (IClass clazz : context.snapshot.getClassesByName(Pattern.compile(pattern), true))
                excludeMap.put(clazz, fields);
        }
    }
    return excludeMap;
}
/**
 * Runs {@code rv} and rethrows any failure as an unchecked
 * {@link AnalysisException}, keeping call sites free of
 * checked-exception boilerplate.
 */
private static <V> V $(RV<V> rv) {
    try {
        return rv.run();
    } catch (Throwable t) {
        throw new AnalysisException(t);
    }
}
// Void-returning variant: adapts a value-less action to the
// exception-wrapping $(RV) helper.
private void $(R e) {
    $(() -> {
        e.run();
        return null;
    });
}
/**
 * Releases the underlying MAT snapshot and its resources.
 */
@Override
public void dispose() {
    $(() -> SnapshotFactory.dispose(context.snapshot));
}
/**
 * Basic facts about the dump: JVM info, identifier size, creation time,
 * object / GC-root / class / class-loader counts, and used heap size.
 */
@Override
public Overview.Details getDetails() {
    return $(() -> {
        SnapshotInfo snapshotInfo = context.snapshot.getSnapshotInfo();
        return new Overview.Details(snapshotInfo.getJvmInfo(), snapshotInfo.getIdentifierSize(),
            snapshotInfo.getCreationDate().getTime(), snapshotInfo.getNumberOfObjects(),
            snapshotInfo.getNumberOfGCRoots(), snapshotInfo.getNumberOfClasses(),
            snapshotInfo.getNumberOfClassLoaders(), snapshotInfo.getUsedHeapSize(),
            // trailing flag is hard-coded false; semantics defined by
            // Overview.Details -- TODO confirm its meaning
            false);
        }
    );
}
// Convenience overload: no extra arguments, no progress reporting.
private <Res extends IResult> Res queryByCommand(AnalysisContext context,
                                                 String command) throws SnapshotException {
    return queryByCommand(context, command, null, NoOpProgressListener);
}
// Cacheable overload: repeated queries with identical (context, command,
// args) are served from the cache instead of re-running the MAT query.
@Cacheable
protected <Res extends IResult> Res queryByCommand(AnalysisContext context,
                                                   String command,
                                                   Map<String, Object> args) throws SnapshotException {
    return queryByCommand(context, command, args, NoOpProgressListener);
}
// Overload reporting progress but passing no query arguments.
private <Res extends IResult> Res queryByCommand(AnalysisContext context, String command,
                                                 ProgressListener listener) throws SnapshotException {
    return queryByCommand(context, command, null, listener);
}
/**
 * Parses and executes a MAT snapshot query (e.g. "gc_roots", "oql").
 *
 * @param args     optional query arguments applied by name; may be null
 * @param listener receives execution progress
 * @return the query result, unchecked-cast to the caller's expected type
 */
@SuppressWarnings("unchecked")
private <Res extends IResult> Res queryByCommand(AnalysisContext context, String command,
                                                 Map<String, Object> args,
                                                 ProgressListener listener) throws SnapshotException {
    SnapshotQuery query = SnapshotQuery.parse(command, context.snapshot);
    if (args != null) {
        // setArgument declares a checked exception; $ wraps it unchecked.
        args.forEach((k, v) -> $(() -> query.setArgument(k, v)));
    }
    return (Res) query.execute(new ProgressListenerImpl(listener));
}
/**
 * Runs MAT's "system_properties" query and flattens the result table into
 * a name -&gt; value map (columns 1 and 2 hold the key and the value).
 */
@Override
public Map<String, String> getSystemProperties() {
    return $(() -> {
        IResultTable table = queryByCommand(context, "system_properties");
        Map<String, String> properties = new HashMap<>();
        int rows = table.getRowCount();
        for (int rowIndex = 0; rowIndex < rows; rowIndex++) {
            Object row = table.getRow(rowIndex);
            properties.put((String) table.getColumnValue(row, 1),
                           (String) table.getColumnValue(row, 2));
        }
        return properties;
    });
}
/**
 * Builds the basic view of one heap object: label, shallow/retained size,
 * object kind, and GC-root status.
 */
@Override
public JavaObject getObjectInfo(int objectId) {
    return $(() -> {
        JavaObject ho = new JavaObject();
        IObject object = context.snapshot.getObject(objectId);
        ho.setObjectId(objectId);
        ho.setLabel(EscapeUtil.unescapeLabel(object.getDisplayName()));
        ho.setShallowSize(object.getUsedHeapSize());
        ho.setRetainedSize(object.getRetainedHeapSize());
        ho.setObjectType(typeOf(object));
        ho.setGCRoot(context.snapshot.isGCRoot(objectId));
        // Outbound references are always shown as expandable in the UI.
        ho.setHasOutbound(true);
        ho.setSuffix(Helper.suffix(context.snapshot, objectId));
        return ho;
    });
}
/**
 * Assembles the "inspector" panel for an object: its address, class,
 * superclass, class loader, sizes, and GC-root information.
 */
@Override
public InspectorView getInspectorView(int objectId) {
    return $(() -> {
        InspectorView view = new InspectorView();
        ISnapshot snapshot = context.snapshot;
        IObject object = snapshot.getObject(objectId);
        view.setObjectAddress(object.getObjectAddress());
        // A class object describes itself; any other object is described
        // by its class.
        IClass iClass = object instanceof IClass ? (IClass) object : object.getClazz();
        view.setName(iClass.getName());
        view.setObjectType(typeOf(object));
        view.setGCRoot(snapshot.isGCRoot(objectId));
        // class name and address of the object
        IClass clazz = object.getClazz();
        view.setClassLabel(clazz.getTechnicalName());
        view.setClassGCRoot(clazz.getGCRootInfo() != null);
        // super class name (absent for java.lang.Object and primitives)
        if (iClass.getSuperClass() != null) {
            view.setSuperClassName(iClass.getSuperClass().getName());
        }
        // class loader name and address
        IObject classLoader = snapshot.getObject(iClass.getClassLoaderId());
        view.setClassLoaderLabel(classLoader.getTechnicalName());
        view.setClassLoaderGCRoot(classLoader.getGCRootInfo() != null);
        view.setShallowSize(object.getUsedHeapSize());
        view.setRetainedSize(object.getRetainedHeapSize());
        // gc root: render the set of root types, or a fixed "no GC root" text
        GCRootInfo[] gcRootInfo = object.getGCRootInfo();
        view.setGcRootInfo(
            gcRootInfo != null ? "GC root: " + GCRootInfo.getTypeSetAsString(object.getGCRootInfo())
                : "no GC root");
        return view;
    });
}
/**
 * Renders an object's display value: the class-specific value (e.g. the
 * characters of a String) when present, otherwise its technical name.
 */
private String getObjectValue(IObject o) {
    String text = o.getClassSpecificName();
    if (text == null) {
        return o.getTechnicalName();
    }
    return EscapeUtil.unescapeJava(text);
}
/**
 * Maps one page of MAT {@link Field}s to view objects. Object-valued
 * fields are resolved to the referee's id and display value; primitive
 * values are rendered with toString(); null values stay null.
 */
private PageView<Model.FieldView> buildPageViewOfFields(List<Field> fields, int page, int pageSize) {
    return PageViewBuilder.build(fields, new PagingRequest(page, pageSize), field -> {
        Model.FieldView fv = new Model.FieldView();
        fv.fieldType = field.getType();
        fv.name = field.getName();
        Object value = field.getValue();
        if (value instanceof ObjectReference) {
            try {
                fv.objectId = ((ObjectReference) value).getObjectId();
                fv.value = getObjectValue(((ObjectReference) value).getObject());
            } catch (SnapshotException e) {
                // Lambda cannot throw checked exceptions; wrap unchecked.
                throw new AnalysisException(e);
            }
        } else if (value != null) {
            fv.value = value.toString();
        }
        return fv;
    });
}
/**
 * Pages through the "fields" of an object, with per-kind behavior:
 * primitive arrays list element values; object arrays list elements with
 * the referee resolved; instances list declared + inherited instance
 * fields; class objects list only pseudo static fields (names starting
 * with '&lt;', e.g. &lt;class&gt;) along the whole superclass chain --
 * real statics are served by getStaticFields.
 */
@Override
public PageView<Model.FieldView> getFields(int objectId, int page, int pageSize) {
    return $(() -> {
        ISnapshot snapshot = context.snapshot;
        IObject object = snapshot.getObject(objectId);
        PagingRequest pagingRequest = new PagingRequest(page, pageSize);
        if (object instanceof IPrimitiveArray) {
            // Page directly over the array elements.
            List<Model.FieldView> fvs = new ArrayList<>();
            IPrimitiveArray pa = (IPrimitiveArray) object;
            int firstIndex = (pagingRequest.getPage() - 1) * pagingRequest.getPageSize();
            int lastIndex = Math.min(firstIndex + pagingRequest.getPageSize(), pa.getLength());
            for (int i = firstIndex; i < lastIndex; i++) {
                fvs.add(new Model.FieldView(pa.getType(), "[" + i + "]", pa.getValueAt(i).toString()));
            }
            return new PageView<>(pagingRequest, pa.getLength(), fvs);
        } else if (object instanceof IObjectArray) {
            List<Model.FieldView> fvs = new ArrayList<>();
            IObjectArray oa = (IObjectArray) object;
            int firstIndex = (pagingRequest.getPage() - 1) * pagingRequest.getPageSize();
            int lastIndex = Math.min(firstIndex + pagingRequest.getPageSize(), oa.getLength());
            for (int i = firstIndex; i < lastIndex; i++) {
                // Fetch one reference at a time to avoid materializing the
                // whole reference array.
                long[] refs = oa.getReferenceArray(i, 1);
                // 0 doubles as the "null element" marker in the view.
                int refObjectId = 0;
                if (refs[0] != 0) {
                    refObjectId = snapshot.mapAddressToId(refs[0]);
                }
                String value = null;
                if (refObjectId != 0) {
                    value = getObjectValue(snapshot.getObject(refObjectId));
                }
                fvs.add(new Model.FieldView(IObject.Type.OBJECT, "[" + i + "]", value, refObjectId));
            }
            return new PageView<>(pagingRequest, oa.getLength(), fvs);
        }
        List<Field> fields = new ArrayList<>();
        boolean isClass = object instanceof IClass;
        IClass clazz = isClass ? (IClass) object : object.getClazz();
        if (object instanceof IInstance) {
            fields.addAll(((IInstance) object).getFields());
        } else if (object instanceof IClass) {
            // Pseudo fields only; walks the entire superclass chain.
            do {
                List<Field> staticFields = clazz.getStaticFields();
                for (Field staticField : staticFields) {
                    if (staticField.getName().startsWith("<")) {
                        fields.add(staticField);
                    }
                }
            } while ((clazz = clazz.getSuperClass()) != null);
        }
        return buildPageViewOfFields(fields, page, pageSize);
    });
}
/**
 * Pages through real static fields (pseudo '&lt;...&gt;' entries excluded).
 * For an instance the whole superclass chain of its class is walked; for
 * a class object only its own declared statics are listed (the while
 * condition's !isClass stops the chain walk in that case).
 */
@Override
public PageView<Model.FieldView> getStaticFields(int objectId, int page,
                                                 int pageSize) {
    return $(() -> {
        ISnapshot snapshot = context.snapshot;
        IObject object = snapshot.getObject(objectId);
        boolean isClass = object instanceof IClass;
        IClass clazz = isClass ? (IClass) object : object.getClazz();
        List<Field> fields = new ArrayList<>();
        do {
            List<Field> staticFields = clazz.getStaticFields();
            for (Field staticField : staticFields) {
                if (!staticField.getName().startsWith("<")) {
                    fields.add(staticField);
                }
            }
        } while (!isClass && (clazz = clazz.getSuperClass()) != null);
        return buildPageViewOfFields(fields, page, pageSize);
    });
}
/**
 * Translates a raw heap address to MAT's internal object id.
 */
@Override
public int mapAddressToId(long address) {
    return $(() -> context.snapshot.mapAddressToId(address));
}
/**
 * Returns the class-specific display value of an object (e.g. a String's
 * characters), or an empty string when the object has none.
 */
@Override
public String getObjectValue(int objectId) {
    return $(() -> {
        String text = context.snapshot.getObject(objectId).getClassSpecificName();
        return text == null ? Constant.EMPTY_STRING : EscapeUtil.unescapeJava(text);
    });
}
/**
 * Returns the "biggest objects" pie slices for the overview page. Slices
 * without a context (e.g. the "remainder" slice) get ILLEGAL_OBJECT_ID.
 */
@Override
public List<Overview.BigObject> getBigObjects() {
    return $(() -> {
        IResultPie result = queryByCommand(context, "pie_biggest_objects");
        List<? extends IResultPie.Slice> slices = result.getSlices();
        return slices
            .stream()
            .map(slice -> new Overview.BigObject(slice.getLabel(), slice.getContext() != null
                    ? slice.getContext().getObjectId() :
                    Helper.ILLEGAL_OBJECT_ID,
                slice.getValue(), slice.getDescription()))
            .collect(Collectors.toList());
    });
}
/**
 * Runs (or returns the cached result of) MAT's ClassLoaderExplorerQuery.
 * The result is cached on the context behind a SoftReference and built
 * with double-checked locking so concurrent callers compute it at most
 * once per cache lifetime.
 */
private ClassLoaderExplorerData queryClassLoader(AnalysisContext context) throws Exception {
    ClassLoaderExplorerData classLoaderExplorerData = context.classLoaderExplorerData.get();
    if (classLoaderExplorerData != null) {
        return classLoaderExplorerData;
    }
    //noinspection SynchronizationOnLocalVariableOrMethodParameter
    synchronized (context) {
        // Re-check under the lock: another thread may have filled the cache.
        classLoaderExplorerData = context.classLoaderExplorerData.get();
        if (classLoaderExplorerData != null) {
            return classLoaderExplorerData;
        }
        IResultTree result = queryByCommand(context, "ClassLoaderExplorerQuery");
        classLoaderExplorerData = new ClassLoaderExplorerData();
        classLoaderExplorerData.result = result;
        // Index rows by class-loader object id for getChildrenOfClassLoader.
        Map<Integer, Object> classLoaderIdMap = new HashMap<>();
        for (Object r : result.getElements()) {
            classLoaderIdMap.put(result.getContext(r).getObjectId(), r);
        }
        classLoaderExplorerData.classLoaderIdMap = classLoaderIdMap;
        classLoaderExplorerData.items = result.getElements();
        // Sort by defined-class count (column 1), descending.
        classLoaderExplorerData.items.sort((Comparator<Object>) (o1, o2) -> Integer
            .compare((int) result.getColumnValue(o2, 1), (int) result.getColumnValue(o1, 1)));
        // Aggregate totals for the summary endpoint.
        for (Object item : classLoaderExplorerData.items) {
            classLoaderExplorerData.definedClasses += (int) result.getColumnValue(item, 1);
            classLoaderExplorerData.numberOfInstances += (int) result.getColumnValue(item, 2);
        }
        context.classLoaderExplorerData = new SoftReference<>(classLoaderExplorerData);
        return classLoaderExplorerData;
    }
}
/**
 * Returns class-loader totals (loader count, defined classes, instances).
 * All figures are pre-aggregated and cached by queryClassLoader.
 */
@Override
public Model.ClassLoader.Summary getSummaryOfClassLoaders() {
    return $(() -> {
        ClassLoaderExplorerData data = queryClassLoader(context);
        Model.ClassLoader.Summary summary = new Model.ClassLoader.Summary();
        summary.setTotalSize(data.items.size());
        summary.setDefinedClasses(data.definedClasses);
        summary.setNumberOfInstances(data.numberOfInstances);
        return summary;
    });
}
/**
 * Pages through the top-level class-loader rows (sorted by defined-class
 * count in queryClassLoader).
 */
@Override
public PageView<Model.ClassLoader.Item> getClassLoaders(int page, int pageSize) {
    return $(() -> {
        ClassLoaderExplorerData data = queryClassLoader(context);
        IResultTree result = data.result;
        return PageViewBuilder.build(data.items, new PagingRequest(page, pageSize), e -> {
            Model.ClassLoader.Item r = new Model.ClassLoader.Item();
            r.setObjectId(result.getContext(e).getObjectId());
            r.setPrefix(((IDecorator) result).prefix(e));
            r.setLabel((String) result.getColumnValue(e, 0));
            r.setDefinedClasses((Integer) result.getColumnValue(e, 1));
            r.setNumberOfInstances((Integer) result.getColumnValue(e, 2));
            r.setClassLoader(true);
            // FIXME
            r.setHasParent(false);
            return r;
        });
    });
}
/**
 * Pages through a class loader's children in the explorer tree. Children
 * may be defined classes (IClass rows) or nested class loaders; the
 * defined-class count only applies to loader rows.
 */
@Override
public PageView<Model.ClassLoader.Item> getChildrenOfClassLoader(int classLoaderId, int page, int pageSize) {
    return $(() -> {
        ClassLoaderExplorerData data = queryClassLoader(context);
        IResultTree result = data.result;
        // Look up the tree row for this loader via the cached id index.
        Object o = data.classLoaderIdMap.get(classLoaderId);
        List<?> children = result.getChildren(o);
        return PageViewBuilder.build(children, new PagingRequest(page, pageSize), e -> {
            Model.ClassLoader.Item r = new Model.ClassLoader.Item();
            r.setObjectId(result.getContext(e).getObjectId());
            r.setPrefix(((IDecorator) result).prefix(e));
            r.setLabel((String) result.getColumnValue(e, 0));
            r.setNumberOfInstances((Integer) result.getColumnValue(e, 2));
            if (!(e instanceof IClass)) {
                r.setClassLoader(true);
                r.setDefinedClasses((Integer) result.getColumnValue(e, 1));
                // FIXME
                r.setHasParent(false);
            }
            return r;
        });
    });
}
/**
 * Totals for unreachable (garbage) objects. The histogram property is
 * only present when the dump was parsed with unreachable objects kept;
 * otherwise an all-zero summary is returned.
 */
@Override
public UnreachableObject.Summary getSummaryOfUnreachableObjects() {
    return $(() -> {
        UnreachableObjectsHistogram histogram =
            (UnreachableObjectsHistogram) context.snapshot.getSnapshotInfo().getProperty(
                UnreachableObjectsHistogram.class.getName());
        UnreachableObject.Summary summary =
            new UnreachableObject.Summary();
        if (histogram != null) {
            summary.setTotalSize(histogram.getRowCount());
            int objects = 0;
            long shallowSize = 0;
            // Column 1: object count, column 2: shallow size per record.
            for (Object record : histogram.getRecords()) {
                objects += (Integer) histogram.getColumnValue(record, 1);
                shallowSize += ((Bytes) histogram.getColumnValue(record, 2)).getValue();
            }
            summary.setObjects(objects);
            summary.setShallowSize(shallowSize);
        }
        return summary;
    });
}
/**
 * Pages through the unreachable-objects histogram, sorted by shallow size
 * (column 2) descending.
 *
 * Fix: the histogram property is absent unless the dump was parsed with
 * "keep unreachable objects" enabled; mirror the null handling already in
 * getSummaryOfUnreachableObjects instead of throwing an NPE.
 */
@Override
public PageView<UnreachableObject.Item> getUnreachableObjects(int page, int pageSize) {
    return $(() -> {
        UnreachableObjectsHistogram histogram =
            (UnreachableObjectsHistogram) context.snapshot.getSnapshotInfo().getProperty(
                UnreachableObjectsHistogram.class.getName());
        if (histogram == null) {
            // No unreachable-object data in this dump: empty page.
            return new PageView<>(new PagingRequest(page, pageSize), 0, Collections.emptyList());
        }
        List<?> total = new ArrayList<>(histogram.getRecords());
        total.sort((Comparator<Object>) (o1, o2) -> {
            long v2 = ((Bytes) histogram.getColumnValue(o2, 2)).getValue();
            long v1 = ((Bytes) histogram.getColumnValue(o1, 2)).getValue();
            return Long.compare(v2, v1);
        });
        return PageViewBuilder.build(total, new PagingRequest(page, pageSize), record -> {
            UnreachableObject.Item r = new UnreachableObject.Item();
            r.setClassName((String) histogram.getColumnValue(record, 0));
            r.setObjectId(Helper.fetchObjectId(histogram.getContext(record)));
            r.setObjects((Integer) histogram.getColumnValue(record, 1));
            r.setShallowSize(((Bytes) histogram.getColumnValue(record, 2)).getValue());
            return r;
        });
    });
}
/**
 * Runs (or returns the cached result of) the direct-byte-buffer OQL
 * query. The refined result is sorted by column 3 descending and summed
 * into position/limit/capacity totals; the whole structure is cached on
 * the context behind a SoftReference with double-checked locking.
 */
private DirectByteBufferData queryDirectByteBufferData(
    AnalysisContext context) throws SnapshotException {
    DirectByteBufferData data = context.directByteBufferData.get();
    if (data != null) {
        return data;
    }
    //noinspection SynchronizationOnLocalVariableOrMethodParameter
    synchronized (context) {
        // Re-check under the lock.
        data = context.directByteBufferData.get();
        if (data != null) {
            return data;
        }
        data = new DirectByteBufferData();
        IResult result = queryByCommand(context, "oql", DirectByteBufferData.ARGS);
        IResultTable table;
        if (result instanceof IResultTable) {
            table = (IResultTable) result;
            RefinedResultBuilder builder =
                new RefinedResultBuilder(new SnapshotQueryContext(context.snapshot), table);
            builder.setSortOrder(3, Column.SortDirection.DESC);
            data.resultContext = (RefinedTable) builder.build();
            DirectByteBuffer.Summary summary = new DirectByteBuffer.Summary();
            summary.totalSize = data.resultContext.getRowCount();
            for (int i = 0; i < summary.totalSize; i++) {
                Object row = data.resultContext.getRow(i);
                summary.position += data.position(row);
                summary.limit += data.limit(row);
                summary.capacity += data.capacity(row);
            }
            data.summary = summary;
        } else {
            // Non-table result means no direct byte buffers were found.
            data.summary = new DirectByteBuffer.Summary();
        }
        context.directByteBufferData = new SoftReference<>(data);
        return data;
    }
}
/**
 * Direct-byte-buffer totals; computed once and cached with the query result.
 */
@Override
public DirectByteBuffer.Summary getSummaryOfDirectByteBuffers() {
    return $(() -> queryDirectByteBufferData(context).summary);
}
/**
 * Pages through individual direct byte buffers from the cached, sorted
 * query result (largest first).
 */
@Override
public PageView<DirectByteBuffer.Item> getDirectByteBuffers(int page, int pageSize) {
    return $(() -> {
        DirectByteBufferData data = queryDirectByteBufferData(context);
        RefinedTable resultContext = data.resultContext;
        // Callback-based paging: rows are fetched lazily by index.
        return PageViewBuilder.build(new PageViewBuilder.Callback<Object>() {
            @Override
            public int totalSize() {
                return data.summary.totalSize;
            }
            @Override
            public Object get(int index) {
                return resultContext.getRow(index);
            }
        }, new PagingRequest(page, pageSize), row -> {
            DirectByteBuffer.Item item = new DirectByteBuffer.Item();
            item.objectId = resultContext.getContext(row).getObjectId();
            item.label = data.label(row);
            item.position = data.position(row);
            item.limit = data.limit(row);
            item.capacity = data.capacity(row);
            return item;
        });
    });
}
/**
 * Shared implementation of the inbound/outbound reference views for one
 * object. The prefix describes the referencing field, so the referrer end
 * of the edge depends on the direction.
 *
 * Fix: use the file's $ wrapper instead of an ad-hoc
 * try/catch-RuntimeException inside the mapper, so failures surface as
 * AnalysisException consistently with every other code path here.
 */
private PageView<JavaObject> queryIOBoundsOfObject(AnalysisContext context, int objectId, int page,
                                                   int pageSize, boolean outbound) throws SnapshotException {
    ISnapshot snapshot = context.snapshot;
    int[] ids = outbound ? snapshot.getOutboundReferentIds(objectId) : snapshot.getInboundRefererIds(objectId);
    return PageViewBuilder.build(ids, new PagingRequest(page, pageSize), id -> $(() -> {
        JavaObject o = new JavaObject();
        IObject object = context.snapshot.getObject(id);
        o.setObjectId(id);
        o.setLabel(object.getDisplayName());
        o.setShallowSize(object.getUsedHeapSize());
        o.setRetainedSize(object.getRetainedHeapSize());
        o.setObjectType(typeOf(object));
        o.setGCRoot(snapshot.isGCRoot(id));
        o.setHasOutbound(true);
        o.setHasInbound(true);
        // Referrer is the origin object for outbound edges, the neighbor
        // for inbound edges.
        o.setPrefix(Helper.prefix(snapshot, outbound ? objectId : id, outbound ? id : objectId));
        o.setSuffix(Helper.suffix(snapshot, id));
        return o;
    }));
}
/**
 * Objects referenced BY the given object (outgoing references), paged.
 */
@Override
public PageView<JavaObject> getOutboundOfObject(int objectId, int page, int pageSize) {
    return $(() -> queryIOBoundsOfObject(context, objectId, page, pageSize, true));
}
/**
 * Objects that reference the given object (incoming references), paged.
 */
@Override
public PageView<JavaObject> getInboundOfObject(int objectId, int page, int pageSize) {
    return $(() -> queryIOBoundsOfObject(context, objectId, page, pageSize, false));
}
/**
 * Lists the GC root categories: one item per root type with its object
 * count (column 0 = type name, column 1 = count).
 */
@Override
public List<GCRoot.Item> getGCRoots() {
    return $(() -> {
        IResultTree tree = queryByCommand(context, "gc_roots");
        List<GCRoot.Item> roots = new ArrayList<>();
        for (Object element : tree.getElements()) {
            GCRoot.Item item = new GCRoot.Item();
            item.setClassName((String) tree.getColumnValue(element, 0));
            item.setObjects((Integer) tree.getColumnValue(element, 1));
            roots.add(item);
        }
        return roots;
    });
}
/**
 * Finds java.lang.String instances whose value matches {@code pattern}
 * as a substring; a null/empty pattern matches everything.
 */
@Override
public PageView<TheString.Item> getStrings(String pattern, int page, int pageSize) {
    return $(() -> {
        // NOTE(review): the user-supplied pattern is concatenated into the
        // command line unescaped -- regex metacharacters or spaces could
        // break or alter the query; consider escaping upstream.
        IResultTree tree = queryByCommand(context, "find_strings java.lang.String -pattern " +
            (pattern == null || pattern.equals("") ? ".*" : ".*" + pattern + ".*"));
        List<?> strings = tree.getElements();
        return PageViewBuilder.build(strings, new PagingRequest(page, pageSize), node -> {
            TheString.Item item = new TheString.Item();
            int id = tree.getContext(node).getObjectId();
            item.setObjectId(id);
            item.setLabel((String)tree.getColumnValue(node,0));
            item.setShallowSize(((Bytes) tree.getColumnValue(node, 1)).getValue());
            item.setRetainedSize(((Bytes) tree.getColumnValue(node, 2)).getValue());
            return item;
        });
    });
}
/**
 * Pages through the classes under one GC-root type (selected by its index
 * in the gc_roots result tree).
 */
@Override
public PageView<GCRoot.Item> getClassesOfGCRoot(int rootTypeIndex, int page, int pageSize) {
    return $(() -> {
        IResultTree tree = queryByCommand(context, "gc_roots");
        Object root = tree.getElements().get(rootTypeIndex);
        List<?> classes = tree.getChildren(root);
        return PageViewBuilder.build(classes, new PagingRequest(page, pageSize), clazz -> {
            GCRoot.Item item = new GCRoot.Item();
            item.setClassName((String) tree.getColumnValue(clazz, 0));
            item.setObjects((Integer) tree.getColumnValue(clazz, 1));
            item.setObjectId(tree.getContext(clazz).getObjectId());
            return item;
        });
    });
}
/**
 * Pages through the root objects of one class under one GC-root type
 * (both selected by their index in the gc_roots result tree).
 */
@Override
public PageView<JavaObject> getObjectsOfGCRoot(int rootTypeIndex, int classIndex, int page, int pageSize) {
    return $(() -> {
        IResultTree tree = queryByCommand(context, "gc_roots");
        Object root = tree.getElements().get(rootTypeIndex);
        List<?> classes = tree.getChildren(root);
        Object clazz = classes.get(classIndex);
        List<?> objects = tree.getChildren(clazz);
        return PageViewBuilder.build(objects, new PagingRequest(page, pageSize),
            // Inner $ wraps the checked SnapshotException from getObject.
            o -> $(() -> {
                    JavaObject ho = new JavaObject();
                    int objectId = tree.getContext(o).getObjectId();
                    IObject object = context.snapshot.getObject(objectId);
                    ho.setLabel(object.getDisplayName());
                    ho.setObjectId(objectId);
                    ho.setShallowSize(object.getUsedHeapSize());
                    ho.setRetainedSize(object.getRetainedHeapSize());
                    ho.setObjectType(typeOf(object));
                    ho.setGCRoot(context.snapshot.isGCRoot(objectId));
                    ho.setSuffix(Helper.suffix(object.getGCRootInfo()));
                    ho.setHasOutbound(true);
                    ho.setHasInbound(true);
                    return ho;
                }
            ));
    });
}
/**
 * Runs MAT's "class_references" query. Accepts either a single object id
 * (Integer) or an id array and normalizes it to the int[] the query
 * expects; direction is expressed via the "inbound" flag.
 */
private IResultTree queryIOBoundClassOfClassReference(AnalysisContext context, Object idOrIds,
                                                      boolean outbound) throws SnapshotException {
    int[] objects = idOrIds instanceof int[] ? (int[]) idOrIds : new int[]{(Integer) idOrIds};
    Map<String, Object> args = new HashMap<>();
    args.put("objects", objects);
    args.put("inbound", !outbound);
    return queryByCommand(context, "class_references", args);
}
/**
 * Converts one class_references row into a view item: label, object
 * count, shallow size, the row's object id(s), and the referrer category
 * derived from the row's icon.
 */
private ClassReferrer.Item buildClassReferenceItem(IResultTree result, Object row) {
    ClassReferrer.Item item = new ClassReferrer.Item();
    item.label = (String) result.getColumnValue(row, 0);
    item.objects = (Integer) result.getColumnValue(row, 1);
    item.shallowSize = ((Bytes) result.getColumnValue(row, 2)).getValue();
    IContextObjectSet context = (IContextObjectSet) result.getContext(row);
    item.objectId = context.getObjectId();
    item.objectIds = context.getObjectIds();
    item.setType(getClassReferrerType(((IIconProvider) result).getIcon(row)));
    return item;
}
/**
 * Root node of the outbound class-reference tree for one object.
 */
@Override
public ClassReferrer.Item getOutboundClassOfClassReference(int objectId) {
    return $(() -> {
        IResultTree result = queryIOBoundClassOfClassReference(context, objectId, true);
        return buildClassReferenceItem(result, result.getElements().get(0));
    });
}
/**
 * Root node of the inbound class-reference tree for one object.
 */
@Override
public ClassReferrer.Item getInboundClassOfClassReference(int objectId) {
    return $(() -> {
        IResultTree result = queryIOBoundClassOfClassReference(context, objectId, false);
        return buildClassReferenceItem(result, result.getElements().get(0));
    });
}
/**
 * Pages through the children of the outbound class-reference root for
 * the given object id set.
 */
@Override
public PageView<ClassReferrer.Item> getOutboundsOfClassReference(int[] objectId, int page, int pageSize) {
    return $(() -> {
        IResultTree result = queryIOBoundClassOfClassReference(context, objectId, true);
        return PageViewBuilder
            .build(result.getChildren(result.getElements().get(0)), new PagingRequest(page, pageSize),
                e -> buildClassReferenceItem(result, e));
    });
}
/**
 * Pages through the children of the inbound class-reference root for
 * the given object id set.
 */
@Override
public PageView<ClassReferrer.Item> getInboundsOfClassReference(int[] objectId, int page, int pageSize) {
    return $(() -> {
        IResultTree result = queryIOBoundClassOfClassReference(context, objectId, false);
        return PageViewBuilder
            .build(result.getChildren(result.getElements().get(0)), new PagingRequest(page, pageSize),
                e -> buildClassReferenceItem(result, e));
    });
}
/**
 * Compares this dump against a baseline dump: totals of the histogram
 * delta (record count, object-count delta, shallow-size delta).
 *
 * NOTE(review): the baseline analyzer opened via PROVIDER is never
 * explicitly disposed here -- presumably the provider caches/manages its
 * lifetime; verify to avoid leaking snapshots.
 */
@Override
public Comparison.Summary getSummaryOfComparison(Path other) {
    return $(() -> {
        ISnapshot baselineSnapshot = ((HeapDumpAnalyzerImpl) PROVIDER.provide(other, Collections.emptyMap(),
            NoOpProgressListener)).context.snapshot;
        ISnapshot targetSnapshot = context.snapshot;
        Histogram targetHistogram = targetSnapshot.getHistogram(new ProgressListenerImpl(NoOpProgressListener));
        Histogram baselineHistogram = baselineSnapshot.getHistogram(new ProgressListenerImpl(NoOpProgressListener));
        final Histogram delta = targetHistogram.diffWithBaseline(baselineHistogram);
        long totalObjects = 0;
        long totalShallowHeap = 0;
        // Column 1: object-count delta, column 2: shallow-size delta.
        for (Object r : delta.getClassHistogramRecords()) {
            totalObjects += (long) delta.getColumnValue(r, 1);
            totalShallowHeap += ((Bytes) delta.getColumnValue(r, 2)).getValue();
        }
        Comparison.Summary summary = new Comparison.Summary();
        summary.setTotalSize(delta.getClassHistogramRecords().size());
        summary.setObjects(totalObjects);
        summary.setShallowSize(totalShallowHeap);
        return summary;
    });
}
/**
 * Pages through the per-class histogram delta against a baseline dump,
 * sorted by shallow-size delta descending.
 */
@Override
public PageView<Comparison.Item> getItemsOfComparison(Path other, int page, int pageSize) {
    return $(() -> {
        ISnapshot baselineSnapshot = ((HeapDumpAnalyzerImpl) PROVIDER.provide(other, Collections.emptyMap(),
            NoOpProgressListener)).context.snapshot;
        ISnapshot targetSnapshot = context.snapshot;
        Histogram targetHistogram = targetSnapshot.getHistogram(new ProgressListenerImpl(NoOpProgressListener));
        Histogram baselineHistogram = baselineSnapshot.getHistogram(new ProgressListenerImpl(NoOpProgressListener));
        final Histogram delta = targetHistogram.diffWithBaseline(baselineHistogram);
        // Unchecked cast: MAT returns the records as a raw collection.
        //noinspection
        ((List<ClassHistogramRecord>) delta.getClassHistogramRecords()).sort((o1, o2) -> Long
            .compare(((Bytes) delta.getColumnValue(o2, 2)).getValue(),
                ((Bytes) delta.getColumnValue(o1, 2)).getValue()));
        return PageViewBuilder.build(delta.getClassHistogramRecords(), new PagingRequest(page, pageSize), r -> {
            Comparison.Item record = new Comparison.Item();
            record.setClassName((String) delta.getColumnValue(r, 0));
            record.setObjects((Long) delta.getColumnValue(r, 1));
            record.setShallowSize(((Bytes) delta.getColumnValue(r, 2)).getValue());
            return record;
        });
    });
}
/**
 * Merge-shortest-paths tree for ALL instances of a class: resolves the
 * class to its instance ids and delegates to the id-array variant.
 */
private IResultTree queryMultiplePath2GCRootsTreeByClassId(AnalysisContext context, int classId,
                                                           GCRootPath.Grouping grouping)
    throws Exception {
    ClassImpl clazz = (ClassImpl) context.snapshot.getObject(classId);
    return queryMultiplePath2GCRootsTreeByObjectIds(context, clazz.getObjectIds(), grouping);
}
/**
 * Runs MAT's "merge_shortest_paths" query for a set of objects. Only
 * FROM_GC_ROOTS grouping is currently supported.
 *
 * @throws JifaException for any other grouping
 */
private IResultTree queryMultiplePath2GCRootsTreeByObjectIds(AnalysisContext context, int[] objectIds,
                                                             GCRootPath.Grouping grouping)
    throws Exception {
    if (grouping != GCRootPath.Grouping.FROM_GC_ROOTS) {
        throw new JifaException("Unsupported grouping now");
    }
    Map<String, Object> args = new HashMap<>();
    args.put("objects", Helper.buildHeapObjectArgument(objectIds));
    return queryByCommand(context, "merge_shortest_paths", args);
}
/**
 * Converts one page of merge-shortest-path tree rows into view nodes
 * (columns: 0 = class name, 1 = referenced objects, 2 = shallow heap,
 * 3 = referenced shallow heap, 4 = retained heap).
 *
 * Fix: reuse the already-resolved object id instead of calling
 * tree.getContext(element) a second time for the same value.
 */
private PageView<GCRootPath.MergePathToGCRootsTreeNode> buildMergePathRootsNode(AnalysisContext context,
                                                                                IResultTree tree, List<?> elements,
                                                                                int page, int pageSize) {
    return PageViewBuilder.build(elements, new PagingRequest(page, pageSize), element -> $(() -> {
        ISnapshot snapshot = context.snapshot;
        GCRootPath.MergePathToGCRootsTreeNode record = new GCRootPath.MergePathToGCRootsTreeNode();
        int objectId = tree.getContext(element).getObjectId();
        IObject object = snapshot.getObject(objectId);
        record.setObjectId(objectId);
        record.setObjectType(typeOf(object));
        record.setGCRoot(snapshot.isGCRoot(objectId));
        record.setClassName(tree.getColumnValue(element, 0).toString());
        record.setSuffix(Helper.suffix(object.getGCRootInfo()));
        record.setRefObjects((int) tree.getColumnValue(element, 1));
        record.setShallowHeap(((Bytes) tree.getColumnValue(element, 2)).getValue());
        record.setRefShallowHeap(((Bytes) tree.getColumnValue(element, 3)).getValue());
        record.setRetainedHeap(((Bytes) tree.getColumnValue(element, 4)).getValue());
        return record;
    }));
}
/**
 * Top-level nodes of the merged GC-root-path tree for all instances of
 * a class.
 */
@Override
public PageView<GCRootPath.MergePathToGCRootsTreeNode> getRootsOfMergePathToGCRootsByClassId(
    int classId, GCRootPath.Grouping grouping, int page, int pageSize) {
    return $(() -> {
        IResultTree tree = queryMultiplePath2GCRootsTreeByClassId(context, classId, grouping);
        return buildMergePathRootsNode(context, tree, tree.getElements(), page, pageSize);
    });
}
/**
 * Top-level nodes of the merged GC-root-path tree for an explicit set of
 * object ids.
 */
@Override
public PageView<GCRootPath.MergePathToGCRootsTreeNode> getRootsOfMergePathToGCRootsByObjectIds(
    int[] objectIds, GCRootPath.Grouping grouping, int page, int pageSize) {
    return $(() -> {
        IResultTree tree = queryMultiplePath2GCRootsTreeByObjectIds(context, objectIds, grouping);
        return buildMergePathRootsNode(context, tree, tree.getElements(), page, pageSize);
    });
}
/**
 * Children of a node in the merged GC-root-path tree for a class; the
 * node is addressed by its object-id path from the root. An unmatched
 * path yields an empty page.
 */
@Override
public PageView<GCRootPath.MergePathToGCRootsTreeNode> getChildrenOfMergePathToGCRootsByClassId(
    int classId, int[] objectIdPathInGCPathTree, GCRootPath.Grouping grouping,
    int page, int pageSize) {
    return $(() -> {
        IResultTree tree = queryMultiplePath2GCRootsTreeByClassId(context, classId, grouping);
        Object object = Helper.fetchObjectInResultTree(tree, objectIdPathInGCPathTree);
        List<?> elements = object == null ? Collections.emptyList() : tree.getChildren(object);
        return buildMergePathRootsNode(context, tree, elements, page, pageSize);
    });
}
/**
 * Children of a node in the merged GC-root-path tree for an explicit id
 * set; the node is addressed by its object-id path from the root. An
 * unmatched path yields an empty page.
 */
@Override
public PageView<GCRootPath.MergePathToGCRootsTreeNode> getChildrenOfMergePathToGCRootsByObjectIds(
    int[] objectIds, int[] objectIdPathInGCPathTree, GCRootPath.Grouping grouping,
    int page, int pageSize) {
    return $(() -> {
        IResultTree tree = queryMultiplePath2GCRootsTreeByObjectIds(context, objectIds, grouping);
        Object object = Helper.fetchObjectInResultTree(tree, objectIdPathInGCPathTree);
        List<?> elements = object == null ? Collections.emptyList() : tree.getChildren(object);
        return buildMergePathRootsNode(context, tree, elements, page, pageSize);
    });
}
/**
 * Computes up to {@code count} shortest paths from GC roots to the
 * origin object, skipping the first {@code skip} paths, and merges them
 * into a single tree rooted at the origin. The computer is recreated per
 * call, so skip/count paging is stateless across calls.
 */
@Override
public GCRootPath.Item getPathToGCRoots(int originId, int skip, int count) {
    return $(() -> {
        ISnapshot snapshot = context.snapshot;
        // Standard exclusions (e.g. weak/soft reference referents).
        Map<IClass, Set<String>> excludeMap = convert(context, GCRootPath.EXCLUDES);
        IPathsFromGCRootsComputer computer = snapshot.getPathsFromGCRoots(originId, excludeMap);
        List<int[]> paths = new ArrayList<>();
        int index = 0;
        int[] current;
        int get = 0;
        // Skip the first `skip` paths, then collect up to `count`.
        while (get < count && (current = computer.getNextShortestPath()) != null) {
            if (index < skip) {
                index++;
                continue;
            }
            paths.add(current);
            get++;
        }
        // One extra probe tells whether more paths exist beyond this page.
        boolean hasMore = computer.getNextShortestPath() != null;
        GCRootPath.Item item = new GCRootPath.Item();
        item.setCount(paths.size());
        item.setHasMore(hasMore);
        GCRootPath.Node origin = new GCRootPath.Node();
        IObject object = snapshot.getObject(originId);
        origin.setOrigin(true);
        origin.setObjectId(originId);
        origin.setLabel(object.getDisplayName());
        origin.setSuffix(Helper.suffix(snapshot, originId));
        origin.setGCRoot(snapshot.isGCRoot(originId));
        origin.setObjectType(typeOf(object));
        origin.setShallowSize(object.getUsedHeapSize());
        origin.setRetainedSize(object.getRetainedHeapSize());
        item.setTree(origin);
        if (paths.size() == 0) {
            return item;
        }
        // Merge the flat paths into a tree: shared prefixes share nodes.
        // Each path starts at the origin (path[0]), so merging begins at 1.
        for (int[] path : paths) {
            GCRootPath.Node parentNode = origin;
            for (index = 1; index < path.length; index++) {
                int childId = path[index];
                GCRootPath.Node childNode = parentNode.getChild(childId);
                if (childNode == null) {
                    IObject childObj = snapshot.getObject(childId);
                    childNode = new GCRootPath.Node();
                    childNode.setObjectId(childId);
                    childNode.setPrefix(Helper.prefix(snapshot, childId, parentNode.getObjectId()));
                    childNode.setLabel(childObj.getDisplayName());
                    childNode.setSuffix(Helper.suffix(snapshot, childId));
                    childNode.setGCRoot(snapshot.isGCRoot(childId));
                    childNode.setObjectType(typeOf(childObj));
                    childNode.setShallowSize(childObj.getUsedHeapSize());
                    childNode.setRetainedSize(childObj.getRetainedHeapSize());
                    parentNode.addChild(childNode);
                }
                parentNode = childNode;
            }
        }
        return item;
    });
}
/**
 * Recursively converts a refined-tree row (and its children) from the
 * leak hunter's "shortest paths" section into the report's path model.
 */
private LeakReport.ShortestPath buildPath(ISnapshot snapshot, RefinedTree rst,
                                          Object row) throws SnapshotException {
    LeakReport.ShortestPath shortestPath = new LeakReport.ShortestPath();
    shortestPath.setLabel((String) rst.getColumnValue(row, 0));
    shortestPath.setShallowSize(((Bytes) rst.getColumnValue(row, 1)).getValue());
    shortestPath.setRetainedSize(((Bytes) rst.getColumnValue(row, 2)).getValue());
    int objectId = rst.getContext(row).getObjectId();
    shortestPath.setObjectId(objectId);
    IObject object = snapshot.getObject(objectId);
    shortestPath.setGCRoot(snapshot.isGCRoot(objectId));
    shortestPath.setObjectType(typeOf(object));
    if (rst.hasChildren(row)) {
        List<LeakReport.ShortestPath> children = new ArrayList<>();
        shortestPath.setChildren(children);
        for (Object c : rst.getChildren(row)) {
            children.add(buildPath(snapshot, rst, c));
        }
    }
    return shortestPath;
}
/**
 * Runs (or returns the cached result of) MAT's leak hunter and converts
 * its composite result into a LeakReport. A plain TextResult means no
 * suspects were found; a SectionSpec is decomposed into an overview pie
 * plus one record per "Problem Suspect"/"Hint" section, each with its
 * description and shortest paths to the accumulation point.
 * Section/entry names are matched against the leak hunter's English
 * resource strings (see the LeakHunterQuery_* markers).
 */
@Override
public LeakReport getLeakReport() {
    return $(() -> {
        // SoftReference cache with double-checked locking, as elsewhere.
        AnalysisContext.LeakReportData data = context.leakReportData.get();
        if (data == null) {
            synchronized (context) {
                data = context.leakReportData.get();
                if (data == null) {
                    IResult result = queryByCommand(context, "leakhunter");
                    data = new AnalysisContext.LeakReportData();
                    data.result = result;
                    context.leakReportData = new SoftReference<>(data);
                }
            }
        }
        IResult result = data.result;
        LeakReport report = new LeakReport();
        if (result instanceof TextResult) {
            // No suspects: the report carries only the informational text.
            report.setInfo(((TextResult) result).getText());
        } else if (result instanceof SectionSpec) {
            report.setUseful(true);
            SectionSpec sectionSpec = (SectionSpec) result;
            report.setName(sectionSpec.getName());
            List<Spec> specs = sectionSpec.getChildren();
            for (int i = 0; i < specs.size(); i++) {
                QuerySpec spec = (QuerySpec) specs.get(i);
                String name = spec.getName();
                if (name == null || name.isEmpty()) {
                    continue;
                }
                // LeakHunterQuery_Overview
                if (name.startsWith("Overview")) {
                    IResultPie irtPie = (IResultPie) spec.getResult();
                    List<? extends IResultPie.Slice> pieSlices = irtPie.getSlices();
                    List<LeakReport.Slice> slices = new ArrayList<>();
                    for (IResultPie.Slice slice : pieSlices) {
                        slices.add(
                            new LeakReport.Slice(slice.getLabel(),
                                Helper.fetchObjectId(slice.getContext()),
                                slice.getValue(), slice.getDescription()));
                    }
                    report.setSlices(slices);
                }
                // LeakHunterQuery_ProblemSuspect
                // LeakHunterQuery_Hint
                else if (name.startsWith("Problem Suspect") || name.startsWith("Hint")) {
                    LeakReport.Record suspect = new LeakReport.Record();
                    // The section index doubles as the suspect's id.
                    suspect.setIndex(i);
                    suspect.setName(name);
                    CompositeResult cr = (CompositeResult) spec.getResult();
                    List<CompositeResult.Entry> entries = cr.getResultEntries();
                    for (CompositeResult.Entry entry : entries) {
                        String entryName = entry.getName();
                        if (entryName == null || entryName.isEmpty()) {
                            IResult r = entry.getResult();
                            if (r instanceof QuerySpec &&
                                // LeakHunterQuery_ShortestPaths
                                ((QuerySpec) r).getName().equals("Shortest Paths To the Accumulation Point")) {
                                IResultTree tree = (IResultTree) ((QuerySpec) r).getResult();
                                RefinedResultBuilder builder = new RefinedResultBuilder(
                                    new SnapshotQueryContext(context.snapshot), tree);
                                RefinedTree rst = (RefinedTree) builder.build();
                                List<?> elements = rst.getElements();
                                List<LeakReport.ShortestPath> paths = new ArrayList<>();
                                suspect.setPaths(paths);
                                for (Object row : elements) {
                                    paths.add(buildPath(context.snapshot, rst, row));
                                }
                            }
                        }
                        // LeakHunterQuery_Description
                        // LeakHunterQuery_Overview
                        else if ((entryName.startsWith("Description") || entryName.startsWith("Overview"))) {
                            TextResult desText = (TextResult) entry.getResult();
                            suspect.setDesc(desText.getText());
                        }
                    }
                    // Lazily create the records list on the first suspect.
                    List<LeakReport.Record> records = report.getRecords();
                    if (records == null) {
                        report.setRecords(records = new ArrayList<>());
                    }
                    records.add(suspect);
                }
            }
        }
        return report;
    });
}
@Cacheable
protected IResult getOQLResult(AnalysisContext context, String oql) {
return $(() -> {
Map<String, Object> args = new HashMap<>();
args.put("queryString", oql);
return queryByCommand(context, "oql", args);
});
}
    /**
     * Executes a Calcite SQL statement against the snapshot and adapts the
     * result (tree, table or plain text) to a {@link CalciteSQLResult}.
     * Query failures are returned as a text result rather than propagated.
     * NOTE(review): {@code sortBy}/{@code ascendingOrder} only apply to tree
     * results; table results keep the query's row order.
     */
    @Override
    public CalciteSQLResult getCalciteSQLResult(String sql, String sortBy, boolean ascendingOrder, int page, int pageSize) {
        return $(() -> {
            Map<String, Object> args = new HashMap<>();
            args.put("sql", sql);
            IResult result;
            try {
                result = queryByCommand(context, "calcite", args);
            } catch (Throwable t) {
                // Surface SQL errors (syntax, unknown table, ...) to the caller
                // as text instead of failing the whole request.
                return new CalciteSQLResult.TextResult(t.getMessage());
            }
            if (result instanceof IResultTree) {
                // Tree result: each row maps to a heap object rendered as JavaObject.
                return new CalciteSQLResult.TreeResult(
                    PageViewBuilder.build(
                        ((IResultTree) result).getElements(),
                        new PagingRequest(page, pageSize),
                        e -> $(()-> context.snapshot.getObject(((IResultTree) result).getContext(e).getObjectId())),
                        o -> $(() -> {
                            JavaObject jo = new JavaObject();
                            jo.setObjectId(o.getObjectId());
                            jo.setLabel(o.getDisplayName());
                            jo.setSuffix(Helper.suffix(o.getGCRootInfo()));
                            jo.setShallowSize(o.getUsedHeapSize());
                            jo.setRetainedSize(o.getRetainedHeapSize());
                            jo.setGCRoot(context.snapshot.isGCRoot(o.getObjectId()));
                            jo.setObjectType(typeOf(o));
                            jo.setHasOutbound(true);
                            return jo;
                        }), IObjectSortHelper.sortBy(sortBy, ascendingOrder)));
            } else if (result instanceof IResultTable) {
                IResultTable table = (IResultTable) result;
                Column[] columns = table.getColumns();
                List<String> cs = Arrays.stream(columns).map(Column::getLabel).collect(Collectors.toList());
                PageView<CalciteSQLResult.TableResult.Entry> pv =
                    PageViewBuilder.build(new PageViewBuilder.Callback<Object>() {
                        @Override
                        public int totalSize() {
                            return table.getRowCount();
                        }
                        @Override
                        public Object get(int index) {
                            return table.getRow(index);
                        }
                    }, new PagingRequest(page, pageSize), o -> {
                        List<Object> l = new ArrayList<>();
                        for (int i = 0; i < columns.length; i++) {
                            Object columnValue = table.getColumnValue(o, i);
                            // Cell values arrive Java-escaped; unescape for display.
                            l.add(columnValue != null ? EscapeUtil.unescapeJava(columnValue.toString()) : null);
                        }
                        IContextObject co = table.getContext(o);
                        // Rows without an associated heap object get a sentinel id.
                        return new CalciteSQLResult.TableResult.Entry(co != null ? co.getObjectId() : Helper.ILLEGAL_OBJECT_ID,
                                                                      l);
                    });
                return new CalciteSQLResult.TableResult(cs, pv);
            } else if (result instanceof TextResult) {
                return new CalciteSQLResult.TextResult(((TextResult) result).getText());
            }
            return new CalciteSQLResult.TextResult("Unsupported Calcite SQL result type");
        });
    }
static class IObjectSortHelper {
static Map<String, Comparator<IObject>> sortTable = new SortTableGenerator<IObject>()
.add("id", IObject::getObjectId)
.add("shallowHeap", IObject::getUsedHeapSize)
.add("retainedHeap", IObject::getRetainedHeapSize)
.add("label", IObject::getDisplayName)
.build();
public static Comparator<IObject> sortBy(String field, boolean ascendingOrder) {
return ascendingOrder ? sortTable.get(field) : sortTable.get(field).reversed();
}
}
    /**
     * Executes an OQL statement (via the cached {@link #getOQLResult(AnalysisContext, String)})
     * and adapts the result (tree, table or text) to an {@link OQLResult}.
     * NOTE(review): {@code sortBy}/{@code ascendingOrder} only apply to tree
     * results; table results keep the query's row order.
     */
    public OQLResult getOQLResult(String oql, String sortBy, boolean ascendingOrder, int page, int pageSize) {
        // Raw result is fetched outside the wrapper so the @Cacheable layer applies.
        IResult result = getOQLResult(context, oql);
        return $(() -> {
            if (result instanceof IResultTree) {
                // Tree result: each row maps to a heap object rendered as JavaObject.
                return new OQLResult.TreeResult(
                    PageViewBuilder.build(
                        ((IResultTree) result).getElements(),
                        new PagingRequest(page, pageSize),
                        e -> $(() -> context.snapshot.getObject(((IResultTree) result).getContext(e).getObjectId())),
                        o -> $(() -> {
                            JavaObject jo = new JavaObject();
                            jo.setObjectId(o.getObjectId());
                            jo.setLabel(o.getDisplayName());
                            jo.setSuffix(Helper.suffix(o.getGCRootInfo()));
                            jo.setShallowSize(o.getUsedHeapSize());
                            jo.setRetainedSize(o.getRetainedHeapSize());
                            jo.setGCRoot(context.snapshot.isGCRoot(o.getObjectId()));
                            jo.setObjectType(typeOf(o));
                            jo.setHasOutbound(true);
                            return jo;
                        }), IObjectSortHelper.sortBy(sortBy, ascendingOrder)));
            } else if (result instanceof IResultTable) {
                IResultTable table = (IResultTable) result;
                Column[] columns = table.getColumns();
                List<String> cs = Arrays.stream(columns).map(Column::getLabel).collect(Collectors.toList());
                PageView<OQLResult.TableResult.Entry> pv =
                    PageViewBuilder.build(new PageViewBuilder.Callback<Object>() {
                        @Override
                        public int totalSize() {
                            return table.getRowCount();
                        }
                        @Override
                        public Object get(int index) {
                            return table.getRow(index);
                        }
                    }, new PagingRequest(page, pageSize), o -> {
                        List<Object> l = new ArrayList<>();
                        for (int i = 0; i < columns.length; i++) {
                            Object columnValue = table.getColumnValue(o, i);
                            l.add(columnValue != null ? columnValue.toString() : null);
                        }
                        IContextObject co = table.getContext(o);
                        // Rows without an associated heap object get a sentinel id.
                        return new OQLResult.TableResult.Entry(co != null ? co.getObjectId() : Helper.ILLEGAL_OBJECT_ID,
                                                               l);
                    });
                return new OQLResult.TableResult(cs, pv);
            } else if (result instanceof TextResult) {
                return new OQLResult.TextResult(((TextResult) result).getText());
            } else {
                throw new AnalysisException("Unsupported OQL result type");
            }
        });
    }
@Override
public Model.Thread.Summary getSummaryOfThreads(String searchText,
SearchType searchType) {
return $(() -> {
IResultTree result = queryByCommand(context, "thread_overview");
List<Model.Thread.Item> items = result.getElements().stream()
.map(row -> new VirtualThreadItem(result, row))
.filter(SearchPredicate.createPredicate(searchText, searchType))
.collect(Collectors.toList());
Model.Thread.Summary summary = new Model.Thread.Summary();
summary.totalSize = items.size();
summary.shallowHeap = items.stream().mapToLong(Model.Thread.Item::getShallowSize).sum();
summary.retainedHeap = items.stream().mapToLong(Model.Thread.Item::getRetainedSize).sum();;
return summary;
});
}
@Override
public PageView<Model.Thread.Item> getThreads(String sortBy, boolean ascendingOrder, String searchText,
SearchType searchType, int page, int pageSize) {
PagingRequest pagingRequest = new PagingRequest(page, pageSize);
return $(() -> {
IResultTree result = queryByCommand(context, "thread_overview");
final AtomicInteger afterFilterCount = new AtomicInteger(0);
List<Model.Thread.Item> items = result.getElements().stream()
.map(row -> new VirtualThreadItem(result, row))
.filter(SearchPredicate.createPredicate(searchText, searchType))
.peek(filtered -> afterFilterCount.incrementAndGet())
.sorted(Model.Thread.Item.sortBy(sortBy, ascendingOrder))
.skip(pagingRequest.from())
.limit(pagingRequest.getPageSize())
.collect(Collectors.toList());
return new PageView(pagingRequest, afterFilterCount.get(), items);
});
}
@Override
public List<Model.Thread.StackFrame> getStackTrace(int objectId) {
return $(() -> {
Map<String, Object> args = new HashMap<>();
args.put("objects", Helper.buildHeapObjectArgument(new int[]{objectId}));
IResultTree result = queryByCommand(context, "thread_overview", args);
List<?> elements = result.getElements();
boolean includesMaxLocalRetained = (result.getColumns().length == 10);
if (result.hasChildren(elements.get(0))) {
List<?> frames = result.getChildren(elements.get(0));
List<Model.Thread.StackFrame> res = frames.stream().map(
frame -> new Model.Thread.StackFrame(((String) result.getColumnValue(frame, 0)),
result.hasChildren(frame),
(includesMaxLocalRetained && result.getColumnValue(frame, 4) != null)
? ((Bytes) result.getColumnValue(frame, 4)).getValue()
: 0L
)).collect(Collectors.toList());
res.stream().filter(t -> !t.getStack().contains("Native Method")).findFirst()
.ifPresent(sf -> sf.setFirstNonNativeFrame(true));
return res;
}
return Collections.emptyList();
});
}
@Override
public List<Model.Thread.LocalVariable> getLocalVariables(int objectId, int depth, boolean firstNonNativeFrame) {
return $(() -> {
Map<String, Object> args = new HashMap<>();
args.put("objects", Helper.buildHeapObjectArgument(new int[]{objectId}));
IResultTree result = queryByCommand(context, "thread_overview", args);
List<?> elements = result.getElements();
if (result.hasChildren(elements.get(0))) {
List<?> frames = result.getChildren(elements.get(0));
Object frame = frames.get(depth - 1);
if (result.hasChildren(frame)) {
List<?> locals = result.getChildren(frame);
return locals.stream().map(local -> {
int id = result.getContext(local).getObjectId();
Model.Thread.LocalVariable var = new Model.Thread.LocalVariable();
var.setObjectId(id);
try {
IObject object = context.snapshot.getObject(id);
var.setLabel(object.getDisplayName());
var.setShallowSize(object.getUsedHeapSize());
var.setRetainedSize(object.getRetainedHeapSize());
var.setObjectType(typeOf(object));
var.setGCRoot(context.snapshot.isGCRoot(id));
var.setHasOutbound(result.hasChildren(var));
// ThreadStackQuery_Label_Local
var.setPrefix("<local>");
if (firstNonNativeFrame) {
GCRootInfo[] gcRootInfos = object.getGCRootInfo();
if (gcRootInfos != null) {
for (GCRootInfo gcRootInfo : gcRootInfos) {
if (gcRootInfo.getContextId() != 0 &&
(gcRootInfo.getType() & GCRootInfo.Type.BUSY_MONITOR) != 0 &&
gcRootInfo.getContextId() == objectId) {
// ThreadStackQuery_Label_Local_Blocked_On
var.setPrefix("<local, blocked on>");
}
}
}
}
var.setSuffix(Helper.suffix(context.snapshot, id));
return var;
} catch (SnapshotException e) {
throw new JifaException(e);
}
}).collect(Collectors.toList());
}
}
return Collections.emptyList();
});
}
    /**
     * Lists classes loaded by more than one class loader ("duplicate_classes"),
     * ordered by the number of defining loaders, descending.
     */
    @Override
    public PageView<DuplicatedClass.ClassItem> getDuplicatedClasses(String searchText,
                                                                    SearchType searchType, int page, int pageSize) {
        return $(() -> {
            IResultTree result = queryByCommand(context, "duplicate_classes");
            List<?> classes = result.getElements();
            // Each element is itself a list (the defining loaders); most-duplicated first.
            classes.sort((o1, o2) -> ((List<?>) o2).size() - ((List<?>) o1).size());
            PageViewBuilder<?, DuplicatedClass.ClassItem> builder = PageViewBuilder.fromList(classes);
            // Columns: 0 = class name, 1 = duplicate count.
            return builder.paging(new PagingRequest(page, pageSize))
                          .map(r -> {
                              DuplicatedClass.ClassItem item = new DuplicatedClass.ClassItem();
                              item.setLabel((String) result.getColumnValue(r, 0));
                              item.setCount((Integer) result.getColumnValue(r, 1));
                              return item;
                          })
                          .filter(SearchPredicate.createPredicate(searchText, searchType))
                          .done();
        });
    }
    /**
     * Lists the class loaders that define one duplicated class.
     *
     * @param index position of the class in the list returned by
     *              {@link #getDuplicatedClasses}; the same descending sort is
     *              re-applied here so the indices line up
     */
    @Override
    public PageView<DuplicatedClass.ClassLoaderItem> getClassloadersOfDuplicatedClass(int index, int page,
                                                                                      int pageSize) {
        return $(() -> {
            IResultTree result = queryByCommand(context, "duplicate_classes");
            List<?> classes = result.getElements();
            // Must match the ordering used by getDuplicatedClasses.
            classes.sort((o1, o2) -> ((List<?>) o2).size() - ((List<?>) o1).size());
            List<?> classLoaders = (List<?>) classes.get(index);
            // Columns: 0 = loader label, 2 = defined classes, 3 = instantiated objects.
            return PageViewBuilder.build(classLoaders, new PagingRequest(page, pageSize), r -> {
                DuplicatedClass.ClassLoaderItem item = new DuplicatedClass.ClassLoaderItem();
                item.setLabel((String) result.getColumnValue(r, 0));
                item.setDefinedClassesCount((Integer) result.getColumnValue(r, 2));
                item.setInstantiatedObjectsCount((Integer) result.getColumnValue(r, 3));
                GCRootInfo[] roots;
                try {
                    roots = ((IClass) r).getGCRootInfo();
                } catch (SnapshotException e) {
                    throw new JifaException(e);
                }
                int id = ((IClass) r).getClassLoaderId();
                item.setObjectId(id);
                item.setGCRoot(context.snapshot.isGCRoot(id));
                item.setSuffix(roots != null ? GCRootInfo.getTypeSetAsString(roots) : null);
                return item;
            });
        });
    }
    /**
     * Returns one page of the histogram, grouped by class, class loader,
     * superclass or package.
     *
     * @param groupingBy grouping mode; drives both the MAT command and the
     *                   shape of the returned items
     * @param ids        optional object ids to restrict the histogram to
     */
    @Override
    public PageView<Model.Histogram.Item> getHistogram(Model.Histogram.Grouping groupingBy,
                                                       int[] ids, String sortBy, boolean ascendingOrder,
                                                       String searchText, SearchType searchType,
                                                       int page, int pageSize) {
        return $(() -> {
            Map<String, Object> args = new HashMap<>();
            if (ids != null) {
                args.put("objects", Helper.buildHeapObjectArgument(ids));
            }
            IResult result = queryByCommand(context, "histogram -groupBy " + groupingBy.name(), args);
            switch (groupingBy) {
                case BY_CLASS:
                    Histogram h = (Histogram) result;
                    List<ClassHistogramRecord> records =
                        (List<ClassHistogramRecord>) h.getClassHistogramRecords();
                    // Retained sizes are not computed by the histogram query itself;
                    // calculate them (approximate=true) before mapping.
                    return PageViewBuilder.<ClassHistogramRecord, Model.Histogram.Item>fromList(records)
                        .beforeMap(record -> $(() -> record
                            .calculateRetainedSize(context.snapshot, true, true, Helper.VOID_LISTENER)))
                        .paging(new PagingRequest(page, pageSize))
                        .map(record -> new Model.Histogram.Item(record.getClassId(), record.getLabel(),
                                                                Model.Histogram.ItemType.CLASS,
                                                                record.getNumberOfObjects(),
                                                                record.getUsedHeapSize(),
                                                                record.getRetainedHeapSize()))
                        .sort(Model.Histogram.Item.sortBy(sortBy, ascendingOrder))
                        .filter(createPredicate(searchText, searchType))
                        .done();
                case BY_CLASSLOADER:
                    Histogram.ClassLoaderTree ct = (Histogram.ClassLoaderTree) result;
                    @SuppressWarnings("unchecked")
                    PageViewBuilder<? extends XClassLoaderHistogramRecord, Model.Histogram.Item> builder =
                        PageViewBuilder.fromList((List<? extends XClassLoaderHistogramRecord>) ct.getElements());
                    return builder
                        .beforeMap(record -> $(() -> record.calculateRetainedSize(context.snapshot, true, true,
                                                                                  Helper.VOID_LISTENER)))
                        .paging(new PagingRequest(page, pageSize))
                        .map(record ->
                                 new Model.Histogram.Item(record.getClassLoaderId(), record.getLabel(),
                                                          Model.Histogram.ItemType.CLASS_LOADER,
                                                          record.getNumberOfObjects(),
                                                          record.getUsedHeapSize(),
                                                          record.getRetainedHeapSize())
                        )
                        .sort(Model.Histogram.Item.sortBy(sortBy, ascendingOrder))
                        .filter(createPredicate(searchText, searchType))
                        .done();
                case BY_SUPERCLASS:
                    // Superclass tree exposes values only through columns:
                    // 0 = label, 1 = object count, 2 = shallow size.
                    Histogram.SuperclassTree st = (Histogram.SuperclassTree) result;
                    //noinspection unchecked
                    return PageViewBuilder.<HistogramRecord, Model.Histogram.Item>fromList(
                        (List<HistogramRecord>) st.getElements())
                        .paging(new PagingRequest(page, pageSize))
                        .map(e -> {
                            Model.Histogram.Item item = new Model.Histogram.Item();
                            int objectId = st.getContext(e).getObjectId();
                            item.setType(Model.Histogram.ItemType.SUPER_CLASS);
                            item.setObjectId(objectId);
                            item.setLabel((String) st.getColumnValue(e, 0));
                            item.setNumberOfObjects((Long) st.getColumnValue(e, 1));
                            item.setShallowSize(((Bytes) st.getColumnValue(e, 2)).getValue());
                            return item;
                        })
                        .sort(Model.Histogram.Item.sortBy(sortBy, ascendingOrder))
                        .filter(createPredicate(searchText, searchType))
                        .done();
                case BY_PACKAGE:
                    Histogram.PackageTree pt = (Histogram.PackageTree) result;
                    //noinspection unchecked
                    return
                        PageViewBuilder.<HistogramRecord, Model.Histogram.Item>fromList(
                            (List<HistogramRecord>) pt.getElements())
                            .paging(new PagingRequest(page, pageSize))
                            .map(e -> {
                                Model.Histogram.Item item = new Model.Histogram.Item();
                                String label = (String) pt.getColumnValue(e, 0);
                                item.setLabel(label);
                                if (e instanceof XClassHistogramRecord) {
                                    // Leaf: an actual class with a real object id.
                                    int objectId = pt.getContext(e).getObjectId();
                                    item.setObjectId(objectId);
                                    item.setType(Model.Histogram.ItemType.CLASS);
                                } else {
                                    // Package node: no heap object; the label hash
                                    // serves as a synthetic id for tree expansion.
                                    item.setObjectId(label.hashCode());
                                    item.setType(Model.Histogram.ItemType.PACKAGE);
                                }
                                // Primitive types and their arrays live outside any
                                // package, so reclassify them as CLASS rows.
                                if (label.matches("^int(\\[\\])*") || label.matches("^char(\\[\\])*") ||
                                    label.matches("^byte(\\[\\])*") || label.matches("^short(\\[\\])*") ||
                                    label.matches("^boolean(\\[\\])*") ||
                                    label.matches("^double(\\[\\])*") ||
                                    label.matches("^float(\\[\\])*") || label.matches("^long(\\[\\])*") ||
                                    label.matches("^void(\\[\\])*")) {
                                    item.setType(Model.Histogram.ItemType.CLASS);
                                }
                                item.setNumberOfObjects((Long) pt.getColumnValue(e, 1));
                                item.setShallowSize(((Bytes) pt.getColumnValue(e, 2)).getValue());
                                return item;
                            })
                            .sort(Model.Histogram.Item.sortBy(sortBy, ascendingOrder))
                            .filter(createPredicate(searchText, searchType))
                            .done();
                default:
                    throw new AnalysisException("Should not reach here");
            }
        });
    }
@Override
public PageView<JavaObject> getHistogramObjects(int classId, int page, int pageSize) {
return $(() -> {
IResult result = queryByCommand(context, "histogram -groupBy BY_CLASS", Collections.emptyMap());
Histogram h = (Histogram) result;
List<ClassHistogramRecord> records =
(List<ClassHistogramRecord>) h.getClassHistogramRecords();
Optional<ClassHistogramRecord> ro = records.stream().filter(r -> r.getClassId() == classId).findFirst();
if (ro.isPresent()) {
IContextObject c = ((Histogram) result).getContext(ro.get());
if (c instanceof IContextObjectSet) {
int[] objectIds = ((IContextObjectSet) c).getObjectIds();
return PageViewBuilder.build(objectIds, new PagingRequest(page, pageSize), this::getObjectInfo);
}
}
return PageView.empty();
});
}
    /**
     * Expands one node of a grouped histogram and returns its children, paged.
     * BY_CLASS rows are leaves, so that grouping is rejected here.
     *
     * @param groupBy        grouping mode of the histogram being expanded
     * @param ids            optional object ids the histogram was restricted to
     * @param parentObjectId id of the node to expand (for package nodes this is
     *                       the synthetic label-hash id assigned in getHistogram)
     */
    @Override
    public PageView<Model.Histogram.Item> getChildrenOfHistogram(Model.Histogram.Grouping groupBy, int[] ids,
                                                                 String sortBy, boolean ascendingOrder,
                                                                 int parentObjectId, int page, int pageSize) {
        return $(() -> {
            Map<String, Object> args = new HashMap<>();
            if (ids != null) {
                args.put("objects", Helper.buildHeapObjectArgument(ids));
            }
            IResult result = queryByCommand(context, "histogram -groupBy " + groupBy.name(), args);
            switch (groupBy) {
                case BY_CLASS: {
                    throw new AnalysisException("Should not reach here");
                }
                case BY_CLASSLOADER: {
                    // Children of a class loader are its per-class records.
                    Histogram.ClassLoaderTree tree = (Histogram.ClassLoaderTree) result;
                    List<?> elems = tree.getElements();
                    List<? extends ClassHistogramRecord> children = null;
                    for (Object elem : elems) {
                        if (elem instanceof XClassLoaderHistogramRecord) {
                            if (((XClassLoaderHistogramRecord) elem).getClassLoaderId() == parentObjectId) {
                                children = (List<? extends ClassHistogramRecord>) ((XClassLoaderHistogramRecord) elem)
                                    .getClassHistogramRecords();
                                break;
                            }
                        }
                    }
                    if (children != null) {
                        //noinspection unchecked
                        return PageViewBuilder.<ClassHistogramRecord, Model.Histogram.Item>fromList(
                            (List<ClassHistogramRecord>) children)
                            .beforeMap(record -> $(() -> record
                                .calculateRetainedSize(context.snapshot, true, true, Helper.VOID_LISTENER)))
                            .paging(new PagingRequest(page, pageSize))
                            .map(record -> new Model.Histogram.Item(record.getClassId(), record.getLabel(),
                                                                    Model.Histogram.ItemType.CLASS,
                                                                    record.getNumberOfObjects(),
                                                                    record.getUsedHeapSize(),
                                                                    record.getRetainedHeapSize()))
                            .sort(Model.Histogram.Item.sortBy(sortBy, ascendingOrder))
                            .done();
                    } else {
                        return PageView.empty();
                    }
                }
                case BY_SUPERCLASS: {
                    // The superclass tree keeps sub-classes in a private
                    // "subClasses" field, so it is walked via reflection.
                    Histogram.SuperclassTree st = (Histogram.SuperclassTree) result;
                    List<?> children = new ExoticTreeFinder(st)
                        .setGetChildrenCallback(node -> {
                            Map<String, ?> subClasses = ReflectionUtil.getFieldValueOrNull(node, "subClasses");
                            if (subClasses != null) {
                                return new ArrayList<>(subClasses.values());
                            }
                            return null;
                        })
                        .setPredicate((theTree, theNode) -> theTree.getContext(theNode).getObjectId())
                        .findChildrenOf(parentObjectId);
                    if (children != null) {
                        //noinspection unchecked
                        return PageViewBuilder.<HistogramRecord, Model.Histogram.Item>fromList(
                            (List<HistogramRecord>) children)
                            .paging(new PagingRequest(page, pageSize))
                            .map(e -> {
                                Model.Histogram.Item item = new Model.Histogram.Item();
                                int objectId = st.getContext(e).getObjectId();
                                item.setType(Model.Histogram.ItemType.SUPER_CLASS);
                                item.setObjectId(objectId);
                                item.setLabel((String) st.getColumnValue(e, 0));
                                item.setNumberOfObjects((Long) st.getColumnValue(e, 1));
                                item.setShallowSize(((Bytes) st.getColumnValue(e, 2)).getValue());
                                return item;
                            })
                            .sort(Model.Histogram.Item.sortBy(sortBy, ascendingOrder))
                            .done();
                    } else {
                        return PageView.empty();
                    }
                }
                case BY_PACKAGE: {
                    // Package nodes were assigned label.hashCode() as their id in
                    // getHistogram; the same scheme is used here to find the parent.
                    Histogram.PackageTree pt = (Histogram.PackageTree) result;
                    Object targetParentNode = new ExoticTreeFinder(pt)
                        .setGetChildrenCallback(node -> {
                            Map<String, ?> subPackages = ReflectionUtil.getFieldValueOrNull(node, "subPackages");
                            if (subPackages != null) {
                                return new ArrayList<>(subPackages.values());
                            } else {
                                return null;
                            }
                        })
                        .setPredicate((theTree, theNode) -> {
                            if (!(theNode instanceof XClassHistogramRecord)) {
                                try {
                                    // "label" is declared on the superclass and is
                                    // not otherwise accessible.
                                    java.lang.reflect.Field
                                        field = theNode.getClass().getSuperclass().getDeclaredField("label");
                                    field.setAccessible(true);
                                    String labelName = (String) field.get(theNode);
                                    return labelName.hashCode();
                                } catch (Throwable e) {
                                    e.printStackTrace();
                                }
                            }
                            return null;
                        })
                        .findTargetParentNode(parentObjectId);
                    if (targetParentNode != null) {
                        Map<String, ?> packageMap = ReflectionUtil.getFieldValueOrNull(targetParentNode, "subPackages");
                        List<?> elems = new ArrayList<>();
                        if (packageMap != null) {
                            if (packageMap.size() == 0) {
                                // Leaf package: its children are classes.
                                elems = ReflectionUtil.getFieldValueOrNull(targetParentNode, "classes");
                            } else {
                                elems = new ArrayList<>(packageMap.values());
                            }
                        }
                        //noinspection unchecked
                        return
                            PageViewBuilder.<HistogramRecord, Model.Histogram.Item>fromList(
                                (List<HistogramRecord>) elems)
                                .paging(new PagingRequest(page, pageSize))
                                .map(e -> {
                                    Model.Histogram.Item item = new Model.Histogram.Item();
                                    String label = (String) pt.getColumnValue(e, 0);
                                    item.setLabel(label);
                                    if (e instanceof XClassHistogramRecord) {
                                        int objectId = pt.getContext(e).getObjectId();
                                        item.setObjectId(objectId);
                                        item.setType(Model.Histogram.ItemType.CLASS);
                                    } else {
                                        item.setObjectId(label.hashCode());
                                        item.setType(Model.Histogram.ItemType.PACKAGE);
                                    }
                                    // Primitive types/arrays are not packages.
                                    if (label.matches("^int(\\[\\])*") || label.matches("^char(\\[\\])*") ||
                                        label.matches("^byte(\\[\\])*") || label.matches("^short(\\[\\])*") ||
                                        label.matches("^boolean(\\[\\])*") ||
                                        label.matches("^double(\\[\\])*") ||
                                        label.matches("^float(\\[\\])*") || label.matches("^long(\\[\\])*") ||
                                        label.matches("^void(\\[\\])*")) {
                                        item.setType(Model.Histogram.ItemType.CLASS);
                                    }
                                    item.setNumberOfObjects((Long) pt.getColumnValue(e, 1));
                                    item.setShallowSize(((Bytes) pt.getColumnValue(e, 2)).getValue());
                                    return item;
                                })
                                .sort(Model.Histogram.Item.sortBy(sortBy, ascendingOrder))
                                .done();
                    } else {
                        return PageView.empty();
                    }
                }
                default: {
                    throw new AnalysisException("Should not reach here");
                }
            }
        });
    }
private PageView<DominatorTree.DefaultItem> buildDefaultItems(ISnapshot snapshot, IResultTree tree,
List<?> elements,
boolean ascendingOrder, String sortBy,
String searchText, SearchType searchType,
PagingRequest pagingRequest) {
final AtomicInteger afterFilterCount = new AtomicInteger(0);
List<DominatorTree.DefaultItem> items = elements.stream()
.map(e -> $(() -> new VirtualDefaultItem(snapshot, tree, e)))
.filter(SearchPredicate.createPredicate(searchText, searchType))
.peek(filtered -> afterFilterCount.incrementAndGet())
.sorted(DominatorTree.DefaultItem.sortBy(sortBy, ascendingOrder))
.skip(pagingRequest.from())
.limit(pagingRequest.getPageSize())
.collect(Collectors.toList());
return new PageView(pagingRequest, afterFilterCount.get(), items);
}
private PageView<DominatorTree.ClassItem> buildClassItems(ISnapshot snapshot, IResultTree tree, List<?> elements,
boolean ascendingOrder,
String sortBy,
String searchText, SearchType searchType,
PagingRequest pagingRequest) {
final AtomicInteger afterFilterCount = new AtomicInteger(0);
List<DominatorTree.ClassItem> items = elements.stream()
.map(e -> $(() -> new VirtualClassItem(snapshot, tree, e)))
.filter(SearchPredicate.createPredicate(searchText, searchType))
.peek(filtered -> afterFilterCount.incrementAndGet())
.sorted(DominatorTree.ClassItem.sortBy(sortBy, ascendingOrder))
.skip(pagingRequest.from())
.limit(pagingRequest.getPageSize())
.collect(Collectors.toList());
return new PageView(pagingRequest, afterFilterCount.get(), items);
}
private PageView<DominatorTree.ClassLoaderItem> buildClassLoaderItems(ISnapshot snapshot, IResultTree tree,
List<?> elements, boolean ascendingOrder,
String sortBy,
String searchText, SearchType searchType,
PagingRequest pagingRequest) {
final AtomicInteger afterFilterCount = new AtomicInteger(0);
List<DominatorTree.ClassLoaderItem> items = elements.stream()
.map(e -> $(() -> new VirtualClassLoaderItem(snapshot, tree, e)))
.filter(SearchPredicate.createPredicate(searchText, searchType))
.peek(filtered -> afterFilterCount.incrementAndGet())
.sorted(DominatorTree.ClassLoaderItem.sortBy(sortBy, ascendingOrder))
.skip(pagingRequest.from())
.limit(pagingRequest.getPageSize())
.collect(Collectors.toList());
return new PageView(pagingRequest, afterFilterCount.get(), items);
}
private PageView<DominatorTree.PackageItem> buildPackageItems(ISnapshot snapshot, IResultTree tree,
List<?> elements,
boolean ascendingOrder, String sortBy,
String searchText, SearchType searchType,
PagingRequest pagingRequest) {
final AtomicInteger afterFilterCount = new AtomicInteger(0);
List<DominatorTree.PackageItem> items = elements.stream()
.map(e -> $(() -> new VirtualPackageItem(snapshot, tree, e)))
.filter(SearchPredicate.createPredicate(searchText, searchType))
.peek(filtered -> afterFilterCount.incrementAndGet())
.sorted(DominatorTree.PackageItem.sortBy(sortBy, ascendingOrder))
.skip(pagingRequest.from())
.limit(pagingRequest.getPageSize())
.collect(Collectors.toList());
return new PageView(pagingRequest, afterFilterCount.get(), items);
}
@Override
public PageView<? extends DominatorTree.Item> getRootsOfDominatorTree(DominatorTree.Grouping groupBy, String sortBy,
boolean ascendingOrder, String searchText,
SearchType searchType, int page,
int pageSize) {
return $(() -> {
Map<String, Object> args = new HashMap<>();
IResultTree tree = queryByCommand(context, "dominator_tree -groupBy " + groupBy.name(), args);
switch (groupBy) {
case NONE:
return
buildDefaultItems(context.snapshot, tree, tree.getElements(), ascendingOrder, sortBy,
searchText, searchType, new PagingRequest(page, pageSize));
case BY_CLASS:
return buildClassItems(context.snapshot, tree, tree.getElements(), ascendingOrder, sortBy,
searchText, searchType, new PagingRequest(page, pageSize));
case BY_CLASSLOADER:
return buildClassLoaderItems(context.snapshot, tree, tree.getElements(), ascendingOrder, sortBy,
searchText, searchType, new PagingRequest(page, pageSize));
case BY_PACKAGE:
return buildPackageItems(context.snapshot, tree, tree.getElements(), ascendingOrder, sortBy,
searchText, searchType, new PagingRequest(page, pageSize));
default:
throw new AnalysisException("Should not reach here");
}
});
}
    /**
     * Expands one node of the dominator tree and returns its children, paged.
     * Filtering is disabled for child listings (search args are passed as null).
     *
     * @param parentObjectId     id of the node to expand (loader/package groupings)
     * @param idPathInResultTree row-index path from the root to the parent
     *                           (NONE/BY_CLASS groupings)
     */
    @Override
    public PageView<? extends DominatorTree.Item> getChildrenOfDominatorTree(DominatorTree.Grouping groupBy,
                                                                             String sortBy, boolean ascendingOrder,
                                                                             int parentObjectId,
                                                                             int[] idPathInResultTree, int page,
                                                                             int pageSize) {
        return $(() -> {
            Map<String, Object> args = new HashMap<>();
            IResultTree tree = queryByCommand(context, "dominator_tree -groupBy " + groupBy.name(), args);
            switch (groupBy) {
                case NONE:
                    // Locate the parent by walking the row-index path.
                    Object parent = Helper.fetchObjectInResultTree(tree, idPathInResultTree);
                    return
                        buildDefaultItems(context.snapshot, tree, tree.getChildren(parent), ascendingOrder, sortBy,
                                          null, null, new PagingRequest(page, pageSize));
                case BY_CLASS:
                    Object object = Helper.fetchObjectInResultTree(tree, idPathInResultTree);
                    List<?> elements = object == null ? Collections.emptyList() : tree.getChildren(object);
                    return buildClassItems(context.snapshot, tree, elements, ascendingOrder, sortBy, null, null, new PagingRequest(page
                        , pageSize));
                case BY_CLASSLOADER:
                    // Loader nodes are found by their real object id.
                    List<?> children = new ExoticTreeFinder(tree)
                        .setGetChildrenCallback(tree::getChildren)
                        .setPredicate((theTree, theNode) -> theTree.getContext(theNode).getObjectId())
                        .findChildrenOf(parentObjectId);
                    if (children != null) {
                        return buildClassLoaderItems(context.snapshot, tree, children, ascendingOrder, sortBy, null,
                                                     null, new PagingRequest(page, pageSize));
                    } else {
                        return PageView.empty();
                    }
                case BY_PACKAGE:
                    // Package nodes carry no heap object; they are identified by
                    // the hash of their (private, reflectively read) label field,
                    // matching the synthetic ids handed out for root rows.
                    Object targetParentNode = new ExoticTreeFinder(tree)
                        .setGetChildrenCallback(node -> {
                            Map<String, ?> subPackages = ReflectionUtil.getFieldValueOrNull(node, "subPackages");
                            if (subPackages != null) {
                                return new ArrayList<>(subPackages.values());
                            } else {
                                return null;
                            }
                        })
                        .setPredicate((theTree, theNode) -> {
                            try {
                                java.lang.reflect.Field
                                    field =
                                    theNode.getClass().getSuperclass().getSuperclass().getDeclaredField("label");
                                field.setAccessible(true);
                                String labelName = (String) field.get(theNode);
                                return labelName.hashCode();
                            } catch (Throwable e) {
                                e.printStackTrace();
                            }
                            return null;
                        })
                        .findTargetParentNode(parentObjectId);
                    if (targetParentNode != null) {
                        Map<String, ?> packageMap = ReflectionUtil.getFieldValueOrNull(targetParentNode, "subPackages");
                        List<?> elems = new ArrayList<>();
                        if (packageMap != null) {
                            if (packageMap.size() == 0) {
                                // Leaf package: its children are classes.
                                elems = ReflectionUtil.getFieldValueOrNull(targetParentNode, "classes");
                            } else {
                                elems = new ArrayList<>(packageMap.values());
                            }
                        }
                        if (elems != null) {
                            return
                                buildPackageItems(context.snapshot, tree, elems, ascendingOrder, sortBy, null, null,
                                                  new PagingRequest(page, pageSize));
                        } else {
                            return PageView.empty();
                        }
                    } else {
                        return PageView.empty();
                    }
                default:
                    throw new AnalysisException("Should not reach here");
            }
        });
    }
interface R {
void run() throws Exception;
}
interface RV<V> {
V run() throws Exception;
}
    /**
     * Default {@code HeapDumpAnalyzer.Provider}: opens the heap dump with MAT's
     * {@code SnapshotFactory} and wraps the snapshot in a HeapDumpAnalyzerImpl.
     */
    private static class ProviderImpl implements HeapDumpAnalyzer.Provider {
        @Override
        public HeapDumpAnalyzer provide(Path path, Map<String, String> arguments,
                                        ProgressListener listener) {
            return new HeapDumpAnalyzerImpl(new AnalysisContext(
                $(() ->
                  {
                      try {
                          // Apply the requested HPROF parser strictness for the
                          // duration of the open, then restore the default.
                          HprofPreferencesAccess.setStrictness(arguments.get("strictness"));
                          return SnapshotFactory.openSnapshot(path.toFile(),
                                                              arguments,
                                                              new ProgressListenerImpl(listener));
                      } finally {
                          HprofPreferencesAccess.setStrictness(null);
                      }
                  })
            ));
        }
    }
}
| 6,627 |
0 | Create_ds/eclipse-jifa/backend/heap-dump-analyzer/impl/src/main/java/org/eclipse/jifa/hda | Create_ds/eclipse-jifa/backend/heap-dump-analyzer/impl/src/main/java/org/eclipse/jifa/hda/impl/VirtualClassLoaderItem.java | /********************************************************************************
* Copyright (c) 2020 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.hda.impl;
import org.eclipse.jifa.common.util.EscapeUtil;
import org.eclipse.mat.SnapshotException;
import org.eclipse.mat.query.Bytes;
import org.eclipse.mat.query.IContextObjectSet;
import org.eclipse.mat.query.IStructuredResult;
import org.eclipse.mat.snapshot.ISnapshot;
import org.eclipse.jifa.common.util.UseAccessor;
import org.eclipse.jifa.hda.api.AnalysisException;
import static org.eclipse.jifa.hda.api.Model.DominatorTree;
/**
 * Lazily-materialised dominator-tree item for the "group by class loader" view.
 * Getters read straight from the backing MAT result row, so values are only
 * computed when accessed (hence {@code @UseAccessor}).
 */
@UseAccessor
public class VirtualClassLoaderItem extends DominatorTree.ClassLoaderItem {
    // Column indices of the MAT dominator-tree result backing this item.
    static final int COLUMN_LABEL = 0;
    static final int COLUMN_OBJECTS = 1;
    static final int COLUMN_SHALLOW = 2;
    static final int COLUMN_RETAINED = 3;
    static final int COLUMN_PERCENT = 4;
    // transient: backing handles must not be serialised to the client.
    transient final ISnapshot snapshot;
    transient final IStructuredResult results;
    transient final Object e;
    public VirtualClassLoaderItem(final ISnapshot snapshot, final IStructuredResult results, final Object e) {
        this.snapshot = snapshot;
        this.results = results;
        this.e = e;
        // The object id is resolved eagerly; everything else stays lazy.
        this.objectId = results.getContext(e).getObjectId();
    }
    @Override
    public String getSuffix() {
        // Class-loader rows carry no suffix in this view.
        return null;
    }
    @Override
    public int getObjectId() {
        return objectId;
    }
    @Override
    public int getObjectType() {
        try {
            return HeapDumpAnalyzerImpl.typeOf(snapshot.getObject(objectId));
        } catch (SnapshotException se) {
            throw new AnalysisException(se);
        }
    }
    @Override
    public boolean isGCRoot() {
        return snapshot.isGCRoot(objectId);
    }
    @Override
    public String getLabel() {
        // Labels arrive escaped from the MAT layer; unescape for display.
        return EscapeUtil.unescapeLabel((String) results.getColumnValue(e, COLUMN_LABEL));
    }
    @Override
    public long getObjects() {
        // The column may be absent (null) for some rows; treat that as zero.
        Object value = results.getColumnValue(e, COLUMN_OBJECTS);
        if (value != null) {
            return (Integer) value;
        } else {
            return 0;
        }
    }
    @Override
    public int[] getObjectIds() {
        return ((IContextObjectSet) results.getContext(e)).getObjectIds();
    }
    @Override
    public long getShallowSize() {
        return ((Bytes) results.getColumnValue(e, COLUMN_SHALLOW)).getValue();
    }
    @Override
    public long getRetainedSize() {
        return ((Bytes) results.getColumnValue(e, COLUMN_RETAINED)).getValue();
    }
    @Override
    public double getPercent() {
        return (Double) results.getColumnValue(e, COLUMN_PERCENT);
    }
}
0 | Create_ds/eclipse-jifa/backend/heap-dump-analyzer/impl/src/main/java/org/eclipse/jifa/hda | Create_ds/eclipse-jifa/backend/heap-dump-analyzer/impl/src/main/java/org/eclipse/jifa/hda/impl/ProgressListenerImpl.java | /********************************************************************************
* Copyright (c) 2021, 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.hda.impl;
import org.eclipse.jifa.hda.api.FilterProgressListener;
import org.eclipse.jifa.common.listener.ProgressListener;
import org.eclipse.mat.util.IProgressListener;
/**
 * Adapts a Jifa {@code ProgressListener} to MAT's {@code IProgressListener}
 * so MAT can report progress and be cancelled through the Jifa listener.
 */
public class ProgressListenerImpl extends FilterProgressListener implements IProgressListener {

    // Cancellation flag: written via setCanceled, polled by MAT via isCanceled.
    private boolean canceled;

    public ProgressListenerImpl(ProgressListener listener) {
        super(listener);
    }

    @Override
    public void done() {
        // No-op: the wrapped ProgressListener has no completion callback,
        // progress is conveyed through worked() only.
    }

    @Override
    public boolean isCanceled() {
        return canceled;
    }

    @Override
    public void setCanceled(boolean b) {
        this.canceled = b;
    }

    @Override
    public void sendUserMessage(Severity severity, String s, Throwable throwable) {
        // MAT Severity names match the Jifa Level names one-to-one.
        Level level = Level.valueOf(severity.name());
        sendUserMessage(level, s, throwable);
    }
}
| 6,629 |
0 | Create_ds/eclipse-jifa/backend/heap-dump-analyzer/impl/src/main/java/org/eclipse/jifa/hda | Create_ds/eclipse-jifa/backend/heap-dump-analyzer/impl/src/main/java/org/eclipse/jifa/hda/impl/VirtualPackageItem.java | /********************************************************************************
* Copyright (c) 2020 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.hda.impl;
import org.eclipse.jifa.common.util.EscapeUtil;
import org.eclipse.mat.query.Bytes;
import org.eclipse.mat.query.IContextObjectSet;
import org.eclipse.mat.query.IStructuredResult;
import org.eclipse.mat.snapshot.ISnapshot;
import org.eclipse.jifa.common.util.ReflectionUtil;
import org.eclipse.jifa.common.util.UseAccessor;
import static org.eclipse.jifa.hda.api.Model.DominatorTree;
import java.util.Map;
@UseAccessor
public class VirtualPackageItem extends DominatorTree.PackageItem {
static final int COLUMN_LABEL = 0;
static final int COLUMN_OBJECTS = 1;
static final int COLUMN_SHALLOW = 2;
static final int COLUMN_RETAINED = 3;
static final int COLUMN_PERCENT = 4;
transient final ISnapshot snapshot;
transient final IStructuredResult results;
transient final Object e;
public VirtualPackageItem(final ISnapshot snapshot, final IStructuredResult results, final Object e) {
this.snapshot = snapshot;
this.results = results;
this.e = e;
this.objectId = results.getContext(e).getObjectId();
this.isObjType = false;
}
@Override
public String getSuffix() {
return null;
}
@Override
public int getObjectId() {
return getLabel().hashCode();
}
@Override
public int getObjectType() {
Map<String, ?> subPackages = ReflectionUtil.getFieldValueOrNull(e, "subPackages");
if (subPackages.size() == 0) {
return DominatorTree.ItemType.CLASS;
} else {
return DominatorTree.ItemType.PACKAGE;
}
}
@Override
public boolean isGCRoot() {
return snapshot.isGCRoot(objectId);
}
@Override
public String getLabel() {
return EscapeUtil.unescapeLabel((String) results.getColumnValue(e, COLUMN_LABEL));
}
@Override
public long getObjects() {
Object value = results.getColumnValue(e, COLUMN_OBJECTS);
if (value != null) {
return (Integer) value;
} else {
return 0;
}
}
@Override
public int[] getObjectIds() {
return ((IContextObjectSet) results.getContext(e)).getObjectIds();
}
@Override
public long getShallowSize() {
return ((Bytes) results.getColumnValue(e, COLUMN_SHALLOW)).getValue();
}
@Override
public long getRetainedSize() {
return ((Bytes) results.getColumnValue(e, COLUMN_RETAINED)).getValue();
}
@Override
public double getPercent() {
return (Double) results.getColumnValue(e, COLUMN_PERCENT);
}
} | 6,630 |
0 | Create_ds/eclipse-jifa/backend/heap-dump-analyzer/provider/src/main/java/org/eclipse/jifa/hdp | Create_ds/eclipse-jifa/backend/heap-dump-analyzer/provider/src/main/java/org/eclipse/jifa/hdp/provider/MATProvider.java | /********************************************************************************
* Copyright (c) 2021, 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.hdp.provider;
import org.eclipse.jifa.common.JifaException;
import org.eclipse.jifa.hda.api.HeapDumpAnalyzer;
import org.eclipse.jifa.common.listener.ProgressListener;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleException;
import org.osgi.framework.Constants;
import org.osgi.framework.launch.Framework;
import org.osgi.framework.launch.FrameworkFactory;
import java.io.File;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.ServiceLoader;
/**
 * {@link HeapDumpAnalyzer.Provider} that boots an embedded OSGi framework,
 * installs the Eclipse MAT bundles found under the {@code mat-deps} system
 * property, and delegates to the provider service those bundles register.
 */
public class MATProvider implements HeapDumpAnalyzer.Provider {

    // The real provider, resolved from the embedded OSGi framework in init().
    private HeapDumpAnalyzer.Provider provider;

    public MATProvider() {
        init();
    }

    @Override
    public HeapDumpAnalyzer provide(Path path, Map<String, String> arguments, ProgressListener listener) {
        return provider.provide(path, arguments, listener);
    }

    /**
     * Starts the OSGi framework, installs every jar in the mat-deps directory
     * (except the system bundle), starts the bundles MAT needs, and looks up
     * the {@link HeapDumpAnalyzer.Provider} service.
     *
     * @throws JifaException        if the framework or a bundle fails to start
     * @throws NullPointerException if the mat-deps directory cannot be listed
     */
    private void init() {
        Map<String, String> config = new HashMap<>();
        config.put(Constants.FRAMEWORK_STORAGE_CLEAN, Constants.FRAMEWORK_STORAGE_CLEAN_ONFIRSTINIT);
        // Host packages that must be visible to the bundles (API, common
        // utilities, and cglib used for proxying).
        String apiBase = "org.eclipse.jifa.hda.api";
        String commonBase = "org.eclipse.jifa.common";
        String[] extras = {
            apiBase,
            commonBase,
            commonBase + ".aux",
            commonBase + ".enums",
            commonBase + ".request",
            commonBase + ".util",
            commonBase + ".cache",
            commonBase + ".vo",
            commonBase + ".vo.support",
            commonBase + ".listener",
            "net.sf.cglib.beans",
            "net.sf.cglib.core",
            "net.sf.cglib.core.internal",
            "net.sf.cglib.proxy",
            "net.sf.cglib.reflect",
            "net.sf.cglib.transform",
            "net.sf.cglib.transform.impl",
            "net.sf.cglib.util",
        };
        config.put(Constants.FRAMEWORK_SYSTEMPACKAGES_EXTRA, String.join(",", extras));
        try {
            Framework framework = ServiceLoader.load(FrameworkFactory.class).iterator().next().newFramework(config);
            framework.start();
            File[] files = Objects.requireNonNull(new File(System.getProperty("mat-deps")).listFiles());
            List<Bundle> bundles = new ArrayList<>();
            for (File file : files) {
                String name = file.getName();
                // org.eclipse.osgi is the system bundle
                if (name.endsWith(".jar") && !name.startsWith("org.eclipse.osgi_")) {
                    Bundle b = framework.getBundleContext().installBundle(file.toURI().toString());
                    bundles.add(b);
                }
            }
            // Only these bundles need an explicit start; the rest are resolved
            // on demand by the framework.
            List<String> bundlesToStart = new ArrayList<>();
            bundlesToStart.add("org.apache.felix.scr");
            bundlesToStart.add("org.eclipse.equinox.event");
            bundlesToStart.add("org.eclipse.jifa.hda.implementation");
            for (Bundle bundle : bundles) {
                if (bundlesToStart.contains(bundle.getSymbolicName())) {
                    System.out.println("starting bundle: " + bundle);
                    bundle.start();
                }
            }
            provider = framework.getBundleContext()
                    .getService(framework.getBundleContext()
                            .getServiceReference(HeapDumpAnalyzer.Provider.class));
        } catch (BundleException be) {
            throw new JifaException(be);
        }
    }
}
| 6,631 |
0 | Create_ds/eclipse-jifa/backend/heap-dump-analyzer/api/src/main/java/org/eclipse/jifa/hda | Create_ds/eclipse-jifa/backend/heap-dump-analyzer/api/src/main/java/org/eclipse/jifa/hda/api/FilterProgressListener.java | /********************************************************************************
* Copyright (c) 2021, 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.hda.api;
import org.eclipse.jifa.common.listener.ProgressListener;
/**
 * A {@code ProgressListener} decorator that forwards every call to a wrapped
 * listener. Subclasses override individual callbacks to filter or adapt them.
 */
public class FilterProgressListener implements ProgressListener {

    // Delegate that receives every forwarded event; never null after construction.
    ProgressListener listener;

    /**
     * @param listener the listener to forward to; must not be null
     * @throws NullPointerException if {@code listener} is null
     */
    public FilterProgressListener(ProgressListener listener) {
        // An assert is a no-op unless the JVM runs with -ea; fail fast
        // unconditionally instead so a null delegate is caught at creation.
        if (listener == null) {
            throw new NullPointerException("listener must not be null");
        }
        this.listener = listener;
    }

    @Override
    public void beginTask(String s, int i) {
        listener.beginTask(s, i);
    }

    @Override
    public void subTask(String s) {
        listener.subTask(s);
    }

    @Override
    public void worked(int i) {
        listener.worked(i);
    }

    @Override
    public void sendUserMessage(Level level, String s, Throwable throwable) {
        listener.sendUserMessage(level, s, throwable);
    }

    @Override
    public String log() {
        return listener.log();
    }

    @Override
    public double percent() {
        return listener.percent();
    }
}
| 6,632 |
0 | Create_ds/eclipse-jifa/backend/heap-dump-analyzer/api/src/main/java/org/eclipse/jifa/hda | Create_ds/eclipse-jifa/backend/heap-dump-analyzer/api/src/main/java/org/eclipse/jifa/hda/api/AnalysisException.java | /********************************************************************************
* Copyright (c) 2021 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.hda.api;
/**
 * Unchecked exception thrown when a heap-dump analysis operation fails.
 */
public class AnalysisException extends RuntimeException {

    /** Creates an exception carrying only a description of the failure. */
    public AnalysisException(String message) {
        super(message);
    }

    /** Wraps a lower-level failure, preserving its stack trace as the cause. */
    public AnalysisException(Throwable cause) {
        super(cause);
    }

    /**
     * Creates an exception with both a contextual message and the underlying
     * cause, so wrapping code need not choose between the two.
     */
    public AnalysisException(String message, Throwable cause) {
        super(message, cause);
    }
}
| 6,633 |
0 | Create_ds/eclipse-jifa/backend/heap-dump-analyzer/api/src/main/java/org/eclipse/jifa/hda | Create_ds/eclipse-jifa/backend/heap-dump-analyzer/api/src/main/java/org/eclipse/jifa/hda/api/HeapDumpAnalyzer.java | /********************************************************************************
* Copyright (c) 2021, 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.hda.api;
import org.eclipse.jifa.common.listener.ProgressListener;
import org.eclipse.jifa.common.vo.PageView;
import org.eclipse.jifa.common.vo.support.SearchType;
import java.nio.file.Path;
import java.util.List;
import java.util.Map;
import static org.eclipse.jifa.hda.api.Model.ClassLoader;
import static org.eclipse.jifa.hda.api.Model.*;
/**
 * Analysis facade for a single heap dump. Instances are obtained through
 * {@link Provider#provide}. Methods taking {@code page}/{@code pageSize}
 * return one window of a larger result set (see {@code PageView}).
 */
public interface HeapDumpAnalyzer {
    /** Releases resources held for this analysis. */
    void dispose();
    // ---- Overview ----
    Overview.Details getDetails();
    Map<String, String> getSystemProperties();
    List<Overview.BigObject> getBigObjects();
    // ---- Single-object inspection ----
    JavaObject getObjectInfo(int objectId);
    InspectorView getInspectorView(int objectId);
    PageView<FieldView> getFields(int objectId, int page, int pageSize);
    PageView<FieldView> getStaticFields(int objectId, int page, int pageSize);
    /** Maps a heap address to the analyzer's internal object id. */
    int mapAddressToId(long address);
    String getObjectValue(int objectId);
    // ---- Class loaders ----
    ClassLoader.Summary getSummaryOfClassLoaders();
    PageView<ClassLoader.Item> getClassLoaders(int page, int pageSize);
    PageView<ClassLoader.Item> getChildrenOfClassLoader(int classLoaderId,
                                                        int page, int pageSize);
    // ---- Unreachable objects / direct byte buffers ----
    UnreachableObject.Summary getSummaryOfUnreachableObjects();
    PageView<UnreachableObject.Item> getUnreachableObjects(int page, int pageSize);
    DirectByteBuffer.Summary getSummaryOfDirectByteBuffers();
    PageView<DirectByteBuffer.Item> getDirectByteBuffers(int page, int pageSize);
    // ---- References ----
    PageView<JavaObject> getOutboundOfObject(int objectId, int page, int pageSize);
    PageView<JavaObject> getInboundOfObject(int objectId, int page, int pageSize);
    // ---- GC roots and strings ----
    List<GCRoot.Item> getGCRoots();
    PageView<TheString.Item> getStrings(String pattern, int page, int pageSize);
    PageView<GCRoot.Item> getClassesOfGCRoot(int rootTypeIndex, int page, int pageSize);
    PageView<JavaObject> getObjectsOfGCRoot(int rootTypeIndex, int classIndex, int page, int pageSize);
    // ---- Class references ----
    ClassReferrer.Item getOutboundClassOfClassReference(int objectId);
    ClassReferrer.Item getInboundClassOfClassReference(int objectId);
    PageView<ClassReferrer.Item> getOutboundsOfClassReference(int[] objectIds, int page, int pageSize);
    PageView<ClassReferrer.Item> getInboundsOfClassReference(int[] objectIds, int page, int pageSize);
    // ---- Comparison with another dump (identified by its file path) ----
    Comparison.Summary getSummaryOfComparison(Path other);
    PageView<Comparison.Item> getItemsOfComparison(Path other, int page, int pageSize);
    // ---- Merged paths to GC roots ----
    PageView<GCRootPath.MergePathToGCRootsTreeNode> getRootsOfMergePathToGCRootsByClassId(int classId,
                                                                                          GCRootPath.Grouping grouping,
                                                                                          int page, int pageSize);
    PageView<GCRootPath.MergePathToGCRootsTreeNode> getRootsOfMergePathToGCRootsByObjectIds(int[] objectIds,
                                                                                            GCRootPath.Grouping grouping,
                                                                                            int page, int pageSize);
    PageView<GCRootPath.MergePathToGCRootsTreeNode> getChildrenOfMergePathToGCRootsByClassId(int classId,
                                                                                             int[] objectIdPathInGCPathTree,
                                                                                             GCRootPath.Grouping grouping,
                                                                                             int page, int pageSize);
    PageView<GCRootPath.MergePathToGCRootsTreeNode> getChildrenOfMergePathToGCRootsByObjectIds(int[] objectIds,
                                                                                               int[] objectIdPathInGCPathTree,
                                                                                               GCRootPath.Grouping grouping,
                                                                                               int page, int pageSize);
    GCRootPath.Item getPathToGCRoots(int originId, int skip, int count);
    // ---- Reports and queries ----
    LeakReport getLeakReport();
    OQLResult getOQLResult(String oql, String sortBy, boolean ascendingOrder, int page, int pageSize);
    CalciteSQLResult getCalciteSQLResult(String sql, String sortBy, boolean ascendingOrder, int page, int pageSize);
    // ---- Threads ----
    Model.Thread.Summary getSummaryOfThreads(String searchText, SearchType searchType);
    PageView<Model.Thread.Item> getThreads(String sortBy, boolean ascendingOrder, String searchText,
                                           SearchType searchType, int page, int pageSize);
    List<Model.Thread.StackFrame> getStackTrace(int objectId);
    List<Model.Thread.LocalVariable> getLocalVariables(int objectId, int depth, boolean firstNonNativeFrame);
    // ---- Duplicated classes ----
    PageView<DuplicatedClass.ClassItem> getDuplicatedClasses(String searchText, SearchType searchType,
                                                             int page, int pageSize);
    PageView<DuplicatedClass.ClassLoaderItem> getClassloadersOfDuplicatedClass(int index, int page,
                                                                               int pageSize);
    // ---- Histogram ----
    PageView<Histogram.Item> getHistogram(Histogram.Grouping groupingBy, int[] ids,
                                          String sortBy, boolean ascendingOrder,
                                          String searchText, SearchType searchType, int page, int pageSize);
    PageView<JavaObject> getHistogramObjects(int classId, int page, int pageSize);
    PageView<Histogram.Item> getChildrenOfHistogram(Histogram.Grouping groupBy, int[] ids,
                                                    String sortBy, boolean ascendingOrder, int parentObjectId,
                                                    int page, int pageSize);
    // ---- Dominator tree ----
    PageView<? extends DominatorTree.Item> getRootsOfDominatorTree(DominatorTree.Grouping groupBy,
                                                                   String sortBy,
                                                                   boolean ascendingOrder, String searchText,
                                                                   SearchType searchType, int page, int pageSize);
    PageView<? extends DominatorTree.Item> getChildrenOfDominatorTree(DominatorTree.Grouping groupBy,
                                                                      String sortBy, boolean ascendingOrder,
                                                                      int parentObjectId, int[] idPathInResultTree,
                                                                      int page,
                                                                      int pageSize);
    /** Factory for analyzers; implemented by the heap-dump-analyzer provider. */
    interface Provider {
        HeapDumpAnalyzer provide(Path path, Map<String, String> arguments, ProgressListener listener);
    }
}
| 6,634 |
0 | Create_ds/eclipse-jifa/backend/heap-dump-analyzer/api/src/main/java/org/eclipse/jifa/hda | Create_ds/eclipse-jifa/backend/heap-dump-analyzer/api/src/main/java/org/eclipse/jifa/hda/api/Model.java | /********************************************************************************
* Copyright (c) 2021 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.hda.api;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import org.eclipse.jifa.common.util.ErrorUtil;
import org.eclipse.jifa.common.vo.PageView;
import org.eclipse.jifa.common.vo.support.SearchType;
import org.eclipse.jifa.common.vo.support.Searchable;
import org.eclipse.jifa.common.vo.support.SortTableGenerator;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
public interface Model {
    /**
     * View model for the dominator-tree page, with one item flavour per
     * grouping mode (none / by class / by class loader / by package).
     */
    interface DominatorTree {
        // Discriminator values stored in Item.objectType.
        // NOTE(review): duplicates Histogram.ItemType — keep the two in sync.
        interface ItemType {
            int CLASS = 1;
            int CLASS_LOADER = 2;
            int SUPER_CLASS = 5;
            int PACKAGE = 6;
        }
        // How the dominator tree is aggregated before paging.
        enum Grouping {
            NONE,
            BY_CLASS,
            BY_CLASSLOADER,
            BY_PACKAGE;
        }
        // Row shape shared by all grouping modes.
        @Data
        class Item {
            public String label;
            public String suffix;
            public int objectId;
            public int objectType;
            public boolean gCRoot;
            public long shallowSize;
            public long retainedSize;
            public double percent;
            // False for synthetic rows (e.g. package groupings) that do not
            // correspond to a single heap object.
            public boolean isObjType = true;
        }
        // Row for the "by class loader" grouping.
        @Data
        @EqualsAndHashCode(callSuper = true)
        class ClassLoaderItem extends Item implements Searchable {
            // Maps client-supplied sort keys to comparators. Key spellings
            // (including capitalized "Objects") are part of the wire contract.
            private static Map<String, Comparator<ClassLoaderItem>> sortTable =
                new SortTableGenerator<ClassLoaderItem>()
                    .add("id", ClassLoaderItem::getObjectId)
                    .add("shallowHeap", ClassLoaderItem::getShallowSize)
                    .add("retainedHeap", ClassLoaderItem::getRetainedSize)
                    .add("percent", ClassLoaderItem::getPercent)
                    .add("Objects", ClassLoaderItem::getObjects)
                    .build();
            public long objects;
            private int[] objectIds;
            // Comparator for the given key, reversed for descending order.
            public static Comparator<ClassLoaderItem> sortBy(String field, boolean ascendingOrder) {
                return ascendingOrder ? sortTable.get(field) : sortTable.get(field).reversed();
            }
            // Value a text search is matched against for this row.
            @Override
            public Object getBySearchType(SearchType type) {
                switch (type) {
                    case BY_NAME:
                        return getLabel();
                    case BY_PERCENT:
                        return getPercent();
                    case BY_OBJ_NUM:
                        return getObjects();
                    case BY_RETAINED_SIZE:
                        return getRetainedSize();
                    case BY_SHALLOW_SIZE:
                        return getShallowSize();
                    default:
                        ErrorUtil.shouldNotReachHere();
                }
                return null;
            }
        }
        // Row for the "by class" grouping.
        @Data
        @EqualsAndHashCode(callSuper = true)
        class ClassItem extends Item implements Searchable {
            private static Map<String, Comparator<ClassItem>> sortTable = new SortTableGenerator<ClassItem>()
                .add("id", ClassItem::getObjectId)
                .add("shallowHeap", ClassItem::getShallowSize)
                .add("retainedHeap", ClassItem::getRetainedSize)
                .add("percent", ClassItem::getPercent)
                .add("Objects", ClassItem::getObjects)
                .build();
            private int objects;
            private int[] objectIds;
            public static Comparator<ClassItem> sortBy(String field, boolean ascendingOrder) {
                return ascendingOrder ? sortTable.get(field) : sortTable.get(field).reversed();
            }
            @Override
            public Object getBySearchType(SearchType type) {
                switch (type) {
                    case BY_NAME:
                        return getLabel();
                    case BY_PERCENT:
                        return getPercent();
                    case BY_OBJ_NUM:
                        return getObjects();
                    case BY_RETAINED_SIZE:
                        return getRetainedSize();
                    case BY_SHALLOW_SIZE:
                        return getShallowSize();
                    default:
                        ErrorUtil.shouldNotReachHere();
                }
                return null;
            }
        }
        // Row for the ungrouped (NONE) mode: plain objects, no object count.
        @Data
        @EqualsAndHashCode(callSuper = true)
        class DefaultItem extends Item implements Searchable {
            private static Map<String, Comparator<DefaultItem>> sortTable = new SortTableGenerator<DefaultItem>()
                .add("id", DefaultItem::getObjectId)
                .add("shallowHeap", DefaultItem::getShallowSize)
                .add("retainedHeap", DefaultItem::getRetainedSize)
                .add("percent", DefaultItem::getPercent)
                .build();
            public static Comparator<DefaultItem> sortBy(String field, boolean ascendingOrder) {
                return ascendingOrder ? sortTable.get(field) : sortTable.get(field).reversed();
            }
            @Override
            public Object getBySearchType(SearchType type) {
                switch (type) {
                    case BY_NAME:
                        return getLabel();
                    case BY_PERCENT:
                        return getPercent();
                    case BY_OBJ_NUM:
                        // Single objects have no object count to search on.
                        return null;
                    case BY_RETAINED_SIZE:
                        return getRetainedSize();
                    case BY_SHALLOW_SIZE:
                        return getShallowSize();
                    default:
                        ErrorUtil.shouldNotReachHere();
                }
                return null;
            }
        }
        // Row for the "by package" grouping.
        @Data
        @EqualsAndHashCode(callSuper = true)
        class PackageItem extends Item implements Searchable {
            private static Map<String, Comparator<PackageItem>> sortTable = new SortTableGenerator<PackageItem>()
                .add("id", PackageItem::getObjectId)
                .add("shallowHeap", PackageItem::getShallowSize)
                .add("retainedHeap", PackageItem::getRetainedSize)
                .add("percent", PackageItem::getPercent)
                .add("Objects", PackageItem::getObjects)
                .build();
            private long objects;
            private int[] objectIds;
            public static Comparator<PackageItem> sortBy(String field, boolean ascendingOrder) {
                return ascendingOrder ? sortTable.get(field) : sortTable.get(field).reversed();
            }
            @Override
            public Object getBySearchType(SearchType type) {
                switch (type) {
                    case BY_NAME:
                        return getLabel();
                    case BY_PERCENT:
                        return getPercent();
                    case BY_OBJ_NUM:
                        return getObjects();
                    case BY_RETAINED_SIZE:
                        return getRetainedSize();
                    case BY_SHALLOW_SIZE:
                        return getShallowSize();
                    default:
                        ErrorUtil.shouldNotReachHere();
                }
                return null;
            }
        }
    }
interface Histogram {
enum Grouping {
BY_CLASS,
BY_SUPERCLASS,
BY_CLASSLOADER,
BY_PACKAGE;
}
interface ItemType {
int CLASS = 1;
int CLASS_LOADER = 2;
int SUPER_CLASS = 5;
int PACKAGE = 6;
}
@Data
@NoArgsConstructor
class Item implements Searchable {
private static Map<String, Comparator<Item>> sortTable = new SortTableGenerator<Item>()
.add("id", Item::getObjectId)
.add("numberOfObjects", Item::getNumberOfObjects)
.add("shallowSize", Item::getShallowSize)
.add("retainedSize", Item::getRetainedSize)
.build();
public long numberOfObjects;
public long shallowSize;
public long retainedSize;
public String label;
public int objectId;
public int type;
public Item(int objectId, String label, int type, long numberOfObjects, long shallowSize,
long retainedSize) {
this.objectId = objectId;
this.label = label;
this.type = type;
this.numberOfObjects = numberOfObjects;
this.shallowSize = shallowSize;
this.retainedSize = retainedSize;
}
public static Comparator<Item> sortBy(String field, boolean ascendingOrder) {
return ascendingOrder ? sortTable.get(field) : sortTable.get(field).reversed();
}
@Override
public Object getBySearchType(SearchType type) {
switch (type) {
case BY_NAME:
return getLabel();
case BY_OBJ_NUM:
return getNumberOfObjects();
case BY_RETAINED_SIZE:
return getRetainedSize();
case BY_SHALLOW_SIZE:
return getShallowSize();
default:
ErrorUtil.shouldNotReachHere();
}
return null;
}
}
}
interface DuplicatedClass {
@Data
class ClassItem implements Searchable {
public String label;
public int count;
@Override
public Object getBySearchType(SearchType type) {
switch (type) {
case BY_NAME:
return getLabel();
case BY_CLASSLOADER_COUNT:
return (long) getCount();
default:
ErrorUtil.shouldNotReachHere();
}
return null;
}
}
@Data
class ClassLoaderItem {
public String label;
public String suffix;
public int definedClassesCount;
public int instantiatedObjectsCount;
public int objectId;
public boolean gCRoot;
}
}
    /** View model for the threads page. */
    interface Thread {
        // Aggregate numbers shown above the thread table.
        @Data
        class Summary {
            public long totalSize;
            public long shallowHeap;
            public long retainedHeap;
        }
        // One thread row.
        @Data
        class Item implements Searchable {
            // Maps client-supplied sort keys to comparators.
            // NOTE(review): public here while the other item types keep it
            // private — confirm whether external code relies on this.
            public static Map<String, Comparator<Item>> sortTable = new SortTableGenerator<Item>()
                .add("id", Item::getObjectId)
                .add("shallowHeap", Item::getShallowSize)
                .add("retainedHeap", Item::getRetainedSize)
                .add("daemon", Item::isDaemon)
                .add("contextClassLoader", Item::getContextClassLoader)
                .add("name", Item::getName)
                .build();
            public int objectId;
            public String object;
            public String name;
            public long shallowSize;
            public long retainedSize;
            public String contextClassLoader;
            // True when a stack trace is available for this thread.
            public boolean hasStack;
            public boolean daemon;
            public Item(int objectId, String object, String name, long shallowSize, long retainedSize,
                        String contextClassLoader, boolean hasStack, boolean daemon) {
                this.objectId = objectId;
                this.object = object;
                this.name = name;
                this.shallowSize = shallowSize;
                this.retainedSize = retainedSize;
                this.contextClassLoader = contextClassLoader;
                this.hasStack = hasStack;
                this.daemon = daemon;
            }
            public Item() {}
            // Comparator for the given key, reversed for descending order.
            public static Comparator<Item> sortBy(String field, boolean ascendingOrder) {
                return ascendingOrder ? sortTable.get(field) : sortTable.get(field).reversed();
            }
            // Value a text search is matched against for this row.
            @Override
            public Object getBySearchType(SearchType type) {
                switch (type) {
                    case BY_NAME:
                        return getName();
                    case BY_SHALLOW_SIZE:
                        return getShallowSize();
                    case BY_RETAINED_SIZE:
                        return getRetainedSize();
                    case BY_CONTEXT_CLASSLOADER_NAME:
                        return getContextClassLoader();
                    default:
                        ErrorUtil.shouldNotReachHere();
                }
                return null;
            }
        }
        // A local variable of a stack frame; shares the JavaObject shape.
        @Data
        @EqualsAndHashCode(callSuper = true)
        class LocalVariable extends JavaObject {
        }
        // One frame of a thread's stack trace.
        @Data
        class StackFrame {
            public String stack;
            public boolean hasLocal;
            // Not set by the constructor; populated separately by the caller.
            public boolean firstNonNativeFrame;
            public long maxLocalsRetainedSize;
            public StackFrame(String stack, boolean hasLocal, long maxLocalsRetainedSize) {
                this.stack = stack;
                this.hasLocal = hasLocal;
                this.maxLocalsRetainedSize = maxLocalsRetainedSize;
            }
        }
    }
    /**
     * Discriminated union of Calcite SQL query results; {@code getType()}
     * tells the client which concrete shape it received.
     * NOTE(review): structurally identical to OQLResult — keep them in sync.
     */
    interface CalciteSQLResult {
        int TREE = 1;
        int TABLE = 2;
        int TEXT = 3;
        int getType();
        // Tabular result: column names plus one page of value rows.
        @Data
        class TableResult implements CalciteSQLResult {
            public int type = TABLE;
            public List<String> columns;
            public PageView<Entry> pv;
            public TableResult(List<String> columns, PageView<Entry> pv) {
                this.columns = columns;
                this.pv = pv;
            }
            // One row: the backing heap object id plus its column values.
            @Data
            public static class Entry {
                public int objectId;
                public List<Object> values;
                public Entry(int objectId, List<Object> values) {
                    this.objectId = objectId;
                    this.values = values;
                }
            }
        }
        // Plain-text result (e.g. a scalar or message).
        @Data
        class TextResult implements CalciteSQLResult {
            public int type = CalciteSQLResult.TEXT;
            public String text;
            public TextResult(String text) {
                this.text = text;
            }
        }
        // Object-tree result: one page of heap objects.
        @Data
        class TreeResult implements CalciteSQLResult {
            public PageView<JavaObject> pv;
            public int type = TREE;
            public TreeResult(PageView<JavaObject> pv) {
                this.pv = pv;
            }
        }
    }
    /**
     * Discriminated union of OQL query results; {@code getType()} tells the
     * client which concrete shape it received.
     * NOTE(review): structurally identical to CalciteSQLResult — keep in sync.
     */
    interface OQLResult {
        int TREE = 1;
        int TABLE = 2;
        int TEXT = 3;
        int getType();
        // Tabular result: column names plus one page of value rows.
        @Data
        class TableResult implements OQLResult {
            public int type = TABLE;
            public List<String> columns;
            public PageView<Entry> pv;
            public TableResult(List<String> columns, PageView<Entry> pv) {
                this.columns = columns;
                this.pv = pv;
            }
            // One row: the backing heap object id plus its column values.
            @Data
            public static class Entry {
                public int objectId;
                public List<Object> values;
                public Entry(int objectId, List<Object> values) {
                    this.objectId = objectId;
                    this.values = values;
                }
            }
        }
        // Plain-text result (e.g. a scalar or message).
        @Data
        class TextResult implements OQLResult {
            public int type = OQLResult.TEXT;
            public String text;
            public TextResult(String text) {
                this.text = text;
            }
        }
        // Object-tree result: one page of heap objects.
        @Data
        class TreeResult implements OQLResult {
            public PageView<JavaObject> pv;
            public int type = TREE;
            public TreeResult(PageView<JavaObject> pv) {
                this.pv = pv;
            }
        }
    }
interface GCRootPath {
List<String> EXCLUDES = Arrays.asList("java.lang.ref.WeakReference:referent",
"java.lang.ref.SoftReference:referent");
enum Grouping {
FROM_GC_ROOTS,
FROM_GC_ROOTS_BY_CLASS,
FROM_OBJECTS_BY_CLASS
}
@Data
class MergePathToGCRootsTreeNode {
public int objectId;
public String className;
public int refObjects;
public long shallowHeap;
public long refShallowHeap;
public long retainedHeap;
public String suffix;
public int objectType;
public boolean gCRoot;
}
@Data
class Item {
public Node tree;
public int count;
public boolean hasMore;
}
@Data
@EqualsAndHashCode(callSuper = true)
class Node extends JavaObject {
public boolean origin;
public List<Node> children = new ArrayList<>();
public void addChild(Node child) {
children.add(child);
}
public Node getChild(int objectId) {
for (Node child : children) {
if (child.getObjectId() == objectId) {
return child;
}
}
return null;
}
}
}
    /** View model for class-reference (inbound/outbound by class) queries. */
    interface ClassReferrer {
        // Generation tag carried in Item.type.
        // NOTE(review): "OLD_FAD" reads like a typo, but it is a public
        // constant — confirm intent before renaming.
        interface Type {
            int NEW = 0;
            int MIXED = 1;
            int OLD_FAD = 2;
        }
        // One referring/referred class row.
        @Data
        class Item {
            public String label;
            public int objects;
            public long shallowSize;
            public int objectId;
            public int[] objectIds;
            public int type;
        }
    }
    /** View model for comparing this dump against another dump. */
    interface Comparison {
        // Aggregate delta over all compared classes.
        @Data
        class Summary {
            public int totalSize;
            public long objects;
            public long shallowSize;
        }
        // Per-class delta row.
        @Data
        class Item {
            public String className;
            public long objects;
            public long shallowSize;
        }
    }
    /** View model for the string-search page. */
    interface TheString {
        // One matched string instance.
        @Data
        class Item {
            public int objectId;
            public String label;
            public long shallowSize;
            public long retainedSize;
        }
    }
    /** View model for the GC-roots page. */
    interface GCRoot {
        // One GC-root grouping row (root type or class level).
        @Data
        class Item {
            public String className;
            public int objects;
            public int objectId;
            public long shallowSize;
            public long retainedSize;
        }
    }
    /** View model for the direct-byte-buffer page. */
    interface DirectByteBuffer {
        // One DirectByteBuffer instance with its position/limit/capacity.
        @Data
        class Item {
            public int objectId;
            public String label;
            public int position;
            public int limit;
            public int capacity;
        }
        // Totals across all direct byte buffers.
        @Data
        class Summary {
            public int totalSize;
            public long position;
            public long limit;
            public long capacity;
        }
    }
    /** View model for the unreachable-objects page. */
    interface UnreachableObject {
        // One per-class row of unreachable objects.
        @Data
        class Item {
            public int objectId;
            public String className;
            public int objects;
            public long shallowSize;
        }
        // Totals across all unreachable objects.
        @Data
        class Summary {
            public int totalSize;
            public int objects;
            public long shallowSize;
        }
    }
    /** View model for the dump overview page. */
    interface Overview {
        // One slice of the "biggest objects" chart.
        @Data
        class BigObject {
            public String label;
            public int objectId;
            public double value;
            public String description;
            public BigObject(String label, int objectId, double value, String description) {
                this.label = label;
                this.objectId = objectId;
                this.value = value;
                this.description = description;
            }
        }
        // Basic facts about the dump (JVM, sizes, counts).
        @Data
        class Details {
            public String jvmInfo;
            public int identifierSize;
            public long creationDate;
            public int numberOfObjects;
            public int numberOfGCRoots;
            public int numberOfClasses;
            public int numberOfClassLoaders;
            public long usedHeapSize;
            public boolean generationInfoAvailable;
            public Details(String jvmInfo, int identifierSize, long creationDate, int numberOfObjects,
                           int numberOfGCRoots,
                           int numberOfClasses, int numberOfClassLoaders, long usedHeapSize,
                           boolean generationInfoAvailable) {
                this.jvmInfo = jvmInfo;
                this.identifierSize = identifierSize;
                this.creationDate = creationDate;
                this.numberOfObjects = numberOfObjects;
                this.numberOfGCRoots = numberOfGCRoots;
                this.numberOfClasses = numberOfClasses;
                this.numberOfClassLoaders = numberOfClassLoaders;
                this.usedHeapSize = usedHeapSize;
                this.generationInfoAvailable = generationInfoAvailable;
            }
        }
    }
    /** View model for the class-loaders page. */
    interface ClassLoader {
        // One class loader (or defined class) row.
        @Data
        class Item {
            public int objectId;
            public String prefix;
            public String label;
            // True when the row is a class loader, false for a defined class.
            public boolean classLoader;
            public boolean hasParent;
            public int definedClasses;
            public int numberOfInstances;
        }
        // Totals across all class loaders.
        @Data
        class Summary {
            public int totalSize;
            public int definedClasses;
            public int numberOfInstances;
        }
    }
@Data
// Result of the leak-suspects analysis: chart slices plus detailed records
// with shortest reference paths.
class LeakReport {
public boolean useful; // presumably false when the analysis found nothing worth reporting — verify
public String info;
public String name;
public List<Slice> slices;
public List<Record> records;
@Data
// One slice of the suspects chart.
public static class Slice {
public String label;
public int objectId;
public double value;
public String desc;
public Slice(String label, int objectId, double value, String desc) {
this.label = label;
this.objectId = objectId;
this.value = value;
this.desc = desc;
}
}
@Data
// One suspect record with its shortest reference paths.
public static class Record {
public String name;
public String desc;
public int index;
public List<ShortestPath> paths;
}
@Data
// A node in a shortest-path tree; children recurse toward the suspect.
public static class ShortestPath {
public String label;
public long shallowSize;
public long retainedSize;
public int objectId;
public int objectType;
public boolean gCRoot;
public List<ShortestPath> children;
}
}
@Data
class JavaObject {
    /** Object kinds, used by the frontend to distinguish row rendering. */
    public static final int CLASS_TYPE = 1;
    public static final int CLASS_LOADER_TYPE = 2;
    public static final int ARRAY_TYPE = 3;
    public static final int NORMAL_TYPE = 4;

    // FIXME: can we generate these code automatically?
    // Maps a UI sort-field name to the comparator implementing it.
    public static Map<String, Comparator<JavaObject>> sortTable = new SortTableGenerator<JavaObject>()
        .add("id", JavaObject::getObjectId)
        .add("shallowHeap", JavaObject::getShallowSize)
        .add("retainedHeap", JavaObject::getRetainedSize)
        .add("label", JavaObject::getLabel)
        .build();

    public int objectId;
    public String prefix;
    public String label;
    public String suffix;
    public long shallowSize;
    public long retainedSize;
    public boolean hasInbound;
    public boolean hasOutbound;
    public int objectType;
    public boolean gCRoot;

    /**
     * Returns the comparator for the given sort field.
     *
     * @param field one of the keys registered in {@link #sortTable}
     * @param ascendingOrder whether to sort in ascending order
     * @return the comparator, reversed for descending order
     * @throws IllegalArgumentException if the field is not a supported sort key
     *         (previously an unknown field surfaced as an NPE from reversed())
     */
    public static Comparator<JavaObject> sortBy(String field, boolean ascendingOrder) {
        // single lookup instead of the former double sortTable.get(field)
        Comparator<JavaObject> comparator = sortTable.get(field);
        if (comparator == null) {
            throw new IllegalArgumentException("Unsupported sort field: " + field);
        }
        return ascendingOrder ? comparator : comparator.reversed();
    }
}
@Data
// Object inspector panel: identity, class/class-loader context and sizes of one object.
class InspectorView {
public long objectAddress;
public String name;
public boolean gCRoot; // whether the inspected object itself is a GC root
public int objectType;
public String classLabel;
public boolean classGCRoot;
public String superClassName;
public String classLoaderLabel;
public boolean classLoaderGCRoot;
public long shallowSize;
public long retainedSize;
public String gcRootInfo;
}
// Field-level view for the object inspector.
// NOTE(review): unlike the sibling VOs this class is not annotated @Data, so no
// accessors/equals are generated — fields are used directly. Confirm this is intentional.
class FieldView {
public int fieldType;
public String name;
public String value;
public int objectId; // presumably only meaningful for reference-typed fields — verify
public FieldView(int fieldType, String name, String value) {
this.fieldType = fieldType;
this.name = name;
this.value = value;
}
// Convenience constructor that also records the referenced object's id.
public FieldView(int fieldType, String name, String value, int objectId) {
this(fieldType, name, value);
this.objectId = objectId;
}
public FieldView() {
}
}
}
| 6,635 |
0 | Create_ds/eclipse-jifa/backend/heap-dump-analyzer/hook/src/main/java/org/eclipse/mat/hprof | Create_ds/eclipse-jifa/backend/heap-dump-analyzer/hook/src/main/java/org/eclipse/mat/hprof/ui/HprofPreferences.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.mat.hprof.ui;
import org.eclipse.core.runtime.Platform;
import org.eclipse.mat.hprof.HprofPlugin;
public class HprofPreferences {

    /** Preference key selecting how strictly HPROF files are parsed. */
    public static final String STRICTNESS_PREF = "hprofStrictness"; //$NON-NLS-1$

    public static final HprofStrictness DEFAULT_STRICTNESS = HprofStrictness.STRICTNESS_STOP;

    public static final String ADDITIONAL_CLASS_REFERENCES = "hprofAddClassRefs"; //$NON-NLS-1$

    /** Per-thread strictness override; getCurrentStrictness() falls back to the default when unset. */
    public static ThreadLocal<HprofStrictness> TL = new ThreadLocal<>();

    public static void setStrictness(HprofStrictness strictness) {
        TL.set(strictness);
    }

    public static HprofStrictness getCurrentStrictness() {
        HprofStrictness current = TL.get();
        if (current == null) {
            return DEFAULT_STRICTNESS;
        }
        return current;
    }

    public static boolean useAdditionalClassReferences() {
        return Platform.getPreferencesService().getBoolean(HprofPlugin.getDefault().getBundle().getSymbolicName(),
            HprofPreferences.ADDITIONAL_CLASS_REFERENCES, false, null);
    }

    /** Parser strictness levels; toString() yields the persisted preference value. */
    public enum HprofStrictness {
        STRICTNESS_STOP("hprofStrictnessStop"), //$NON-NLS-1$
        STRICTNESS_WARNING("hprofStrictnessWarning"), //$NON-NLS-1$
        STRICTNESS_PERMISSIVE("hprofStrictnessPermissive"); //$NON-NLS-1$

        private final String name;

        HprofStrictness(String name) {
            this.name = name;
        }

        /** Maps a stored preference value back to a constant; unknown or empty values yield the default. */
        public static HprofStrictness parse(String value) {
            if (value == null || value.isEmpty()) {
                return DEFAULT_STRICTNESS;
            }
            for (HprofStrictness candidate : values()) {
                if (candidate.toString().equals(value)) {
                    return candidate;
                }
            }
            return DEFAULT_STRICTNESS;
        }

        @Override
        public String toString() {
            return name;
        }
    }
}
| 6,636 |
0 | Create_ds/eclipse-jifa/backend/heap-dump-analyzer/hook/src/main/java/org/eclipse/mat/hprof | Create_ds/eclipse-jifa/backend/heap-dump-analyzer/hook/src/main/java/org/eclipse/mat/hprof/extension/HprofPreferencesAccess.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.mat.hprof.extension;
import org.eclipse.mat.hprof.ui.HprofPreferences;
/**
 * Bridge that lets code outside the MAT plugin select the HPROF parsing
 * strictness using plain string names ("warn", "permissive").
 */
public final class HprofPreferencesAccess {

    private HprofPreferencesAccess() {
        // static utility class; not instantiable
    }

    /**
     * Translates an external strictness name to the MAT enum.
     * Unknown names (including null) map to the default strictness.
     */
    private static HprofPreferences.HprofStrictness parseStrictness(String strictness) {
        if (strictness == null) {
            return HprofPreferences.DEFAULT_STRICTNESS;
        }
        switch (strictness) {
            case "warn":
                return HprofPreferences.HprofStrictness.STRICTNESS_WARNING;
            case "permissive":
                return HprofPreferences.HprofStrictness.STRICTNESS_PERMISSIVE;
            default:
                return HprofPreferences.DEFAULT_STRICTNESS;
        }
    }

    /** Applies the named strictness to the current thread (see HprofPreferences.TL). */
    public static void setStrictness(String strictness) {
        HprofPreferences.setStrictness(parseStrictness(strictness));
    }
}
| 6,637 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/test/java/org/eclipse/jifa | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/test/java/org/eclipse/jifa/tda/TestSerDesParser.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda;
import org.eclipse.jifa.tda.model.Snapshot;
import org.eclipse.jifa.tda.parser.ParserException;
import org.eclipse.jifa.tda.parser.SerDesParser;
import org.junit.Assert;
import org.junit.Test;
import java.net.URISyntaxException;
import static org.eclipse.jifa.common.listener.ProgressListener.NoOpProgressListener;
public class TestSerDesParser extends TestBase {

    /** Parsing the same dump twice through the ser/des wrapper must yield equal snapshots. */
    @Test
    public void test() throws ParserException, URISyntaxException {
        SerDesParser parser = new SerDesParser(analyzer);
        Snapshot first = parser.parse(pathOfResource("jstack_8.log"), NoOpProgressListener);
        Snapshot second = parser.parse(pathOfResource("jstack_8.log"), NoOpProgressListener);
        Assert.assertEquals(first, second);
    }
}
| 6,638 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/test/java/org/eclipse/jifa | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/test/java/org/eclipse/jifa/tda/TestPool.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda;
import org.eclipse.jifa.tda.model.Pool;
import org.junit.Assert;
import org.junit.Test;
public class TestPool extends TestBase {

    /** Equal strings collapse into one pooled entry regardless of how they were built. */
    @Test
    public void test() {
        Pool<String> pool = new Pool<>();
        // "ab" + "c" and "a" + "bc" are compile-time constants equal to "abc"
        String[] inputs = {"abc", "ab" + "c", "a" + "bc", "cba"};
        for (String s : inputs) {
            pool.add(s);
        }
        Assert.assertEquals(2, pool.size());
        pool.freeze();
    }
}
| 6,639 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/test/java/org/eclipse/jifa | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/test/java/org/eclipse/jifa/tda/TestBase.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda;
import org.apache.commons.io.IOUtils;
import org.eclipse.jifa.tda.model.Snapshot;
import org.eclipse.jifa.tda.parser.JStackParser;
import org.eclipse.jifa.tda.parser.ParserException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URISyntaxException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import static org.eclipse.jifa.common.listener.ProgressListener.NoOpProgressListener;
/** Shared fixture helpers for the thread dump analyzer tests. */
public class TestBase {

    protected final JStackParser analyzer = new JStackParser();

    /** Resolves a classpath test resource to a filesystem path. */
    protected Path pathOfResource(String name) throws URISyntaxException {
        return Paths.get(this.getClass().getClassLoader().getResource(name).toURI());
    }

    /**
     * Writes the given content into a self-deleting temp file.
     * Fix: the FileOutputStream was previously never closed (resource leak);
     * it is now managed with try-with-resources.
     */
    protected Path createTempFile(String content) throws IOException {
        Path path = Files.createTempFile("test", ".tmp");
        path.toFile().deleteOnExit();
        try (FileOutputStream out = new FileOutputStream(path.toFile())) {
            IOUtils.write(content, out, Charset.defaultCharset());
        }
        return path;
    }

    /** Parses an in-memory dump string via a temp file. */
    protected Snapshot parseString(String content) throws ParserException, IOException {
        return analyzer.parse(createTempFile(content), NoOpProgressListener);
    }

    /** Parses a dump shipped as a test resource. */
    protected Snapshot parseFile(String name) throws ParserException, URISyntaxException {
        return analyzer.parse(pathOfResource(name), NoOpProgressListener);
    }
}
| 6,640 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/test/java/org/eclipse/jifa | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/test/java/org/eclipse/jifa/tda/TestJStackParser.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda;
import org.eclipse.jifa.tda.model.Snapshot;
import org.eclipse.jifa.tda.parser.ParserException;
import org.junit.Assert;
import org.junit.Test;
import java.io.IOException;
import java.net.URISyntaxException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
public class TestJStackParser extends TestBase {

    /** The dump timestamp line must be parsed into epoch millis, with or without a trailing newline. */
    @Test
    public void testTime() throws ParserException, ParseException, IOException {
        SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

        String time = "2021-06-12 23:07:17";
        Snapshot snapshot = parseString(time);
        Assert.assertEquals(format.parse(time).getTime(), snapshot.getTimestamp());

        time = "2021-06-12 23:07:18\n";
        snapshot = parseString(time);
        Assert.assertEquals(format.parse(time).getTime(), snapshot.getTimestamp());
    }

    /** A version-only dump yields the VM info and no timestamp (-1). */
    @Test
    public void testVersion() throws ParserException, IOException {
        String version = "Full thread dump OpenJDK 64-Bit Server VM (15.0.1+9-18 mixed mode, sharing):";
        Snapshot snapshot = parseString(version);
        Assert.assertEquals(-1, snapshot.getTimestamp());
        Assert.assertEquals("OpenJDK 64-Bit Server VM (15.0.1+9-18 mixed mode, sharing)", snapshot.getVmInfo());
    }

    @Test
    public void testJDK8Log() throws ParserException, URISyntaxException {
        Assert.assertTrue(parseFile("jstack_8.log").getErrors().isEmpty());
    }

    @Test
    public void testJDK11Log() throws ParserException, URISyntaxException {
        Assert.assertTrue(parseFile("jstack_11_with_deadlocks.log").getErrors().isEmpty());
    }
}
| 6,641 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/test/java/org/eclipse/jifa | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/test/java/org/eclipse/jifa/tda/TestAnalyzer.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda;
import org.eclipse.jifa.common.listener.DefaultProgressListener;
import org.eclipse.jifa.common.request.PagingRequest;
import org.eclipse.jifa.common.vo.PageView;
import org.eclipse.jifa.tda.enums.ThreadType;
import org.eclipse.jifa.tda.vo.Content;
import org.eclipse.jifa.tda.vo.Overview;
import org.eclipse.jifa.tda.vo.VFrame;
import org.eclipse.jifa.tda.vo.VMonitor;
import org.eclipse.jifa.tda.vo.VThread;
import org.junit.Assert;
import org.junit.Test;
public class TestAnalyzer extends TestBase {

    /** End-to-end smoke test over the public query API of ThreadDumpAnalyzer. */
    @Test
    public void test() throws Exception {
        ThreadDumpAnalyzer tda =
            new ThreadDumpAnalyzer(pathOfResource("jstack_8.log"), new DefaultProgressListener());

        // overview() is cacheable: repeated calls must produce equal results
        Overview first = tda.overview();
        Overview second = tda.overview();
        Assert.assertEquals(first, second);
        Assert.assertEquals(first.hashCode(), second.hashCode());

        PageView<VThread> threads = tda.threads("main", ThreadType.JAVA, new PagingRequest(1, 1));
        Assert.assertEquals(1, threads.getTotalSize());

        PageView<VFrame> frames = tda.callSiteTree(0, new PagingRequest(1, 16));
        Assert.assertTrue(frames.getTotalSize() > 0);
        Assert.assertNotEquals(frames.getData().get(0), frames.getData().get(1));

        PageView<VMonitor> monitors = tda.monitors(new PagingRequest(1, 8));
        Assert.assertTrue(monitors.getTotalSize() > 0);

        Content line2 = tda.content(2, 1);
        Assert.assertEquals("Full thread dump OpenJDK 64-Bit Server VM (18-internal+0-adhoc.denghuiddh.my-jdk mixed " +
            "mode, sharing):", line2.getContent().get(0));
    }
}
| 6,642 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/ThreadDumpAnalyzer.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda;
import org.apache.commons.lang.StringUtils;
import org.eclipse.jifa.common.cache.Cacheable;
import org.eclipse.jifa.common.cache.ProxyBuilder;
import org.eclipse.jifa.common.listener.ProgressListener;
import org.eclipse.jifa.common.request.PagingRequest;
import org.eclipse.jifa.common.util.CollectionUtil;
import org.eclipse.jifa.common.util.PageViewBuilder;
import org.eclipse.jifa.common.vo.PageView;
import org.eclipse.jifa.tda.enums.MonitorState;
import org.eclipse.jifa.tda.enums.ThreadType;
import org.eclipse.jifa.tda.model.CallSiteTree;
import org.eclipse.jifa.tda.model.Frame;
import org.eclipse.jifa.tda.model.IdentityPool;
import org.eclipse.jifa.tda.model.JavaThread;
import org.eclipse.jifa.tda.model.Monitor;
import org.eclipse.jifa.tda.model.RawMonitor;
import org.eclipse.jifa.tda.model.Snapshot;
import org.eclipse.jifa.tda.model.Thread;
import org.eclipse.jifa.tda.parser.ParserFactory;
import org.eclipse.jifa.tda.vo.Content;
import org.eclipse.jifa.tda.vo.Overview;
import org.eclipse.jifa.tda.vo.VFrame;
import org.eclipse.jifa.tda.vo.VMonitor;
import org.eclipse.jifa.tda.vo.VThread;
import java.io.FileReader;
import java.io.IOException;
import java.io.LineNumberReader;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Thread dump analyzer
*/
public class ThreadDumpAnalyzer {
private final Snapshot snapshot;
ThreadDumpAnalyzer(Path path, ProgressListener listener) {
snapshot = ParserFactory.buildParser(path).parse(path, listener);
}
/**
* build a parser for a thread dump
*
* @param path the path of thread dump
* @param listener progress listener
* @return analyzer
*/
public static ThreadDumpAnalyzer build(Path path, ProgressListener listener) {
return ProxyBuilder.build(ThreadDumpAnalyzer.class,
new Class[]{Path.class, ProgressListener.class},
new Object[]{path, listener});
}
private void computeThreadState(Overview o, Thread thread) {
ThreadType type = thread.getType();
switch (type) {
case JAVA:
JavaThread jt = ((JavaThread) thread);
o.getJavaThreadStat().inc(jt.getJavaThreadState());
o.getJavaThreadStat().inc(jt.getOsThreadState());
if (jt.isDaemon()) {
o.getJavaThreadStat().incDaemon();
}
break;
case JIT:
o.getJitThreadStat().inc(thread.getOsThreadState());
break;
case GC:
o.getGcThreadStat().inc(thread.getOsThreadState());
break;
case VM:
o.getOtherThreadStat().inc(thread.getOsThreadState());
break;
}
o.getThreadStat().inc(thread.getOsThreadState());
}
/**
* @return the overview of the thread dump
*/
@Cacheable
public Overview overview() {
Overview o = new Overview();
CollectionUtil.forEach(t -> computeThreadState(o, t), snapshot.getJavaThreads(), snapshot.getNonJavaThreads());
snapshot.getThreadGroup().forEach(
(p, l) -> {
for (Thread t : l) {
o.getThreadGroupStat().computeIfAbsent(p, i -> new Overview.ThreadStat()).inc(t.getOsThreadState());
}
}
);
o.setTimestamp(snapshot.getTimestamp());
o.setVmInfo(snapshot.getVmInfo());
o.setJniRefs(snapshot.getJniRefs());
o.setJniWeakRefs(snapshot.getJniWeakRefs());
if (snapshot.getDeadLockThreads() != null) {
o.setDeadLockCount(snapshot.getDeadLockThreads().size());
}
o.setErrorCount(snapshot.getErrors().size());
return o;
}
/**
* @return the call site tree by parent id
*/
public PageView<VFrame> callSiteTree(int parentId, PagingRequest paging) {
CallSiteTree tree = snapshot.getCallSiteTree();
if (parentId < 0 || parentId >= tree.getId2Node().length) {
throw new IllegalArgumentException("Illegal parent id: " + parentId);
}
CallSiteTree.Node node = tree.getId2Node()[parentId];
List<CallSiteTree.Node> children = node.getChildren() != null ? node.getChildren() : Collections.emptyList();
return PageViewBuilder.build(children, paging, n -> {
VFrame vFrame = new VFrame();
vFrame.setId(n.getId());
vFrame.setWeight(n.getWeight());
vFrame.setEnd(n.getChildren() == null);
Frame frame = n.getFrame();
vFrame.setClazz(frame.getClazz());
vFrame.setMethod(frame.getMethod());
vFrame.setModule(frame.getModule());
vFrame.setSourceType(frame.getSourceType());
vFrame.setSource(frame.getSource());
vFrame.setLine(frame.getLine());
if (frame.getMonitors() != null) {
List<VMonitor> vMonitors = new ArrayList<>();
for (Monitor monitor : frame.getMonitors()) {
String clazz = null;
RawMonitor rm = monitor.getRawMonitor();
clazz = rm.getClazz();
vMonitors.add(new VMonitor(rm.getId(), rm.getAddress(), rm.isClassInstance(),
clazz,
monitor.getState()));
}
vFrame.setMonitors(vMonitors);
}
return vFrame;
});
}
private PageView<VThread> buildVThreadPageView(List<Thread> threads, PagingRequest paging) {
return PageViewBuilder.build(threads, paging, thread -> {
VThread vThread = new VThread();
vThread.setId(thread.getId());
vThread.setName(thread.getName());
return vThread;
});
}
/**
* @param name the thread name
* @param type the thread type
* @param paging paging request
* @return the threads filtered by name and type
*/
public PageView<VThread> threads(String name, ThreadType type, PagingRequest paging) {
List<Thread> threads = new ArrayList<>();
CollectionUtil.forEach(t -> {
if (type != null && t.getType() != type) {
return;
}
if (StringUtils.isNotBlank(name) && !t.getName().contains(name)) {
return;
}
threads.add(t);
}, snapshot.getJavaThreads(), snapshot.getNonJavaThreads());
return buildVThreadPageView(threads, paging);
}
/**
* @param groupName the thread group name
* @param paging paging request
* @return the threads filtered by group name and type
*/
public PageView<VThread> threadsOfGroup(String groupName, PagingRequest paging) {
List<Thread> threads = snapshot.getThreadGroup().getOrDefault(groupName, Collections.emptyList());
return buildVThreadPageView(threads, paging);
}
public List<String> rawContentOfThread(int id) throws IOException {
Thread thread = snapshot.getThreadMap().get(id);
if (thread == null) {
throw new IllegalArgumentException("Thread id is illegal: " + id);
}
String path = snapshot.getPath();
int start = thread.getLineStart();
int end = thread.getLineEnd();
List<String> content = new ArrayList<>();
try (LineNumberReader lnr = new LineNumberReader(new FileReader(path))) {
for (int i = 1; i < start; i++) {
lnr.readLine();
}
for (int i = start; i <= end; i++) {
content.add(lnr.readLine());
}
}
return content;
}
/**
* @param lineNo start line number
* @param lineLimit line count
* @return the raw content
* @throws IOException
*/
public Content content(int lineNo, int lineLimit) throws IOException {
String path = snapshot.getPath();
int end = lineNo + lineLimit - 1;
List<String> content = new ArrayList<>();
boolean reachEnd;
try (LineNumberReader lnr = new LineNumberReader(new FileReader(path))) {
for (int i = 1; i < lineNo; i++) {
String line = lnr.readLine();
if (line == null) {
break;
}
}
for (int i = lineNo; i <= end; i++) {
String line = lnr.readLine();
if (line == null) {
break;
}
content.add(line);
}
String line = lnr.readLine();
reachEnd = line == null;
}
return new Content(content, reachEnd);
}
/**
* @param paging paging request
* @return the monitors
*/
public PageView<VMonitor> monitors(PagingRequest paging) {
IdentityPool<RawMonitor> monitors = snapshot.getRawMonitors();
return PageViewBuilder.build(monitors.objects(), paging,
m -> new VMonitor(m.getId(), m.getAddress(), m.isClassInstance(), m.getClazz()));
}
/**
* @param id monitor id
* @param state monitor state
* @param paging paging request
* @return the threads by monitor id and state
*/
public PageView<VThread> threadsByMonitor(int id, MonitorState state, PagingRequest paging) {
Map<MonitorState, List<Thread>> map = snapshot.getMonitorThreads().get(id);
if (map == null) {
throw new IllegalArgumentException("Illegal monitor id: " + id);
}
return buildVThreadPageView(map.getOrDefault(state, Collections.emptyList()), paging);
}
/**
* @param id monitor id
* @return the <state, count> map by monitor id
*/
public Map<MonitorState, Integer> threadCountsByMonitor(int id) {
Map<MonitorState, List<Thread>> map = snapshot.getMonitorThreads().get(id);
if (map == null) {
throw new IllegalArgumentException("Illegal monitor id: " + id);
}
Map<MonitorState, Integer> counts = new HashMap<>();
map.forEach((s, l) -> counts.put(s, l.size()));
return counts;
}
}
| 6,643 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/vo/VMonitor.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda.vo;
import com.google.gson.annotations.SerializedName;
import lombok.AllArgsConstructor;
import lombok.Data;
import org.eclipse.jifa.tda.enums.MonitorState;
@Data
@AllArgsConstructor
// View object for a monitor: identity, address, class name (serialized as "class")
// and, when it applies to a specific frame, the monitor state there.
public class VMonitor {
private int id;
private long address;
private boolean classInstance; // presumably true when the monitor object is a Class instance — verify
@SerializedName("class")
private String clazz;
private MonitorState state;
// Convenience constructor for listings with no per-frame state (state stays null).
public VMonitor(int id, long address, boolean classInstance, String clazz) {
this.id = id;
this.address = address;
this.classInstance = classInstance;
this.clazz = clazz;
}
}
| 6,644 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/vo/VThread.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda.vo;
import lombok.Data;
@Data
// Minimal thread view: internal id plus display name.
public class VThread {
private int id;
private String name;
}
| 6,645 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/vo/VFrame.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda.vo;
import com.google.gson.annotations.SerializedName;
import lombok.Data;
import org.eclipse.jifa.tda.enums.SourceType;
import java.util.List;
@Data
// One node of the call-site tree as delivered to the frontend.
public class VFrame {
private int id;
@SerializedName("class")
private String clazz;
private String method;
private String module;
private SourceType sourceType;
private String source;
private int line;
private int weight; // presumably how many stacks pass through this call site — verify
private VMonitor wait; // presumably the monitor this frame waits on, if any — verify
private List<VMonitor> monitors; // monitors recorded at this frame
private boolean end; // true when this node has no children (leaf of the tree)
}
| 6,646 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/vo/Overview.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda.vo;
import lombok.Data;
import lombok.EqualsAndHashCode;
import org.eclipse.jifa.tda.enums.JavaThreadState;
import org.eclipse.jifa.tda.enums.OSTreadState;
import java.util.HashMap;
import java.util.Map;
@Data
// Overview statistics of one thread dump, filled by ThreadDumpAnalyzer.overview().
public class Overview {
// dump timestamp in epoch millis; -1 when the dump carries no timestamp line
private long timestamp;
private String vmInfo;
private int jniRefs;
private int jniWeakRefs;
private int deadLockCount;
private int errorCount;
// overall and per-category thread statistics
private ThreadStat threadStat = new ThreadStat();
private JavaThreadStat javaThreadStat = new JavaThreadStat();
private ThreadStat jitThreadStat = new ThreadStat();
private ThreadStat gcThreadStat = new ThreadStat();
private ThreadStat otherThreadStat = new ThreadStat();
private Map<String, ThreadStat> threadGroupStat = new HashMap<>();
// exported so consumers can map the ordinal-indexed count arrays below back to state names
private final OSTreadState[] states = OSTreadState.values();
private final JavaThreadState[] javaStates = JavaThreadState.values();
@Data
public static class ThreadStat {
// per-OS-state counters, indexed by OSTreadState.ordinal()
private final int[] counts = new int[OSTreadState.COUNT];
public void inc(OSTreadState state) {
counts[state.ordinal()]++;
}
}
@Data
@EqualsAndHashCode(callSuper = true)
public static class JavaThreadStat extends ThreadStat {
// per-Java-state counters, indexed by JavaThreadState.ordinal()
private final int[] javaCounts = new int[JavaThreadState.COUNT];
private int daemonCount;
public void inc(JavaThreadState state) {
javaCounts[state.ordinal()]++;
}
public void incDaemon() {
daemonCount++;
}
}
}
| 6,647 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/vo/Content.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda.vo;
import lombok.AllArgsConstructor;
import lombok.Data;
import java.util.List;
@Data
@AllArgsConstructor
// A page of raw dump lines; end is true when the read reached end of file.
public class Content {
private List<String> content;
private boolean end;
}
| 6,648 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/util/Converter.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda.util;
/** String-to-value conversion helpers for parsed thread dump fields. */
public class Converter {

    /**
     * Converts a duration string with an "ms" or "s" suffix into milliseconds.
     *
     * @param str the duration text, e.g. "5ms" or "1.5s"; may be null
     * @return the duration in milliseconds, or -1 for null input
     * @throws IllegalArgumentException if the suffix is neither "ms" nor "s"
     */
    public static double str2TimeMillis(String str) {
        if (str == null) {
            return -1;
        }
        // check "ms" before "s" — every "ms" value also ends with "s"
        if (str.endsWith("ms")) {
            return Double.parseDouble(str.substring(0, str.length() - 2));
        }
        if (str.endsWith("s")) {
            return Double.parseDouble(str.substring(0, str.length() - 1)) * 1000;
        }
        throw new IllegalArgumentException(str);
    }
}
| 6,649 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/enums/ThreadType.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda.enums;
/**
 * Coarse classification of the threads found in a thread dump.
 */
public enum ThreadType {
    // an application/Java-level thread
    JAVA,
    // NOTE: Actually a JIT thread is also a JAVA thread in hotspot implementation
    JIT,
    // a garbage-collection worker thread
    GC,
    // any other VM-internal thread
    VM,
}
| 6,650 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/enums/OSTreadState.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda.enums;
/**
 * OS-level thread states as printed at the end of a thread header line in a
 * HotSpot thread dump. (The enum name keeps its historical spelling because
 * it is part of the public API.)
 */
public enum OSTreadState {
    ALLOCATED("allocated"),
    INITIALIZED("initialized"),
    RUNNABLE("runnable"),
    MONITOR_WAIT("waiting for monitor entry"),
    COND_VAR_WAIT("waiting on condition"),
    OBJECT_WAIT("in Object.wait()"),
    BREAK_POINTED("at breakpoint"),
    SLEEPING("sleeping"),
    ZOMBIE("zombie"),
    UNKNOWN("unknown state");

    // values() clones the backing array on every call; cache one copy for
    // the lookup loop below.
    private static final OSTreadState[] VALUES = values();

    public static final int COUNT = VALUES.length;

    // the state text as it appears in the dump
    private final String description;

    OSTreadState(String description) {
        this.description = description;
    }

    /**
     * Maps the textual state of a dump line to a constant.
     *
     * @param s the state text; matching is prefix-based, first match wins
     * @return the first constant whose description is a prefix of {@code s},
     *         or {@link #UNKNOWN} if none matches
     * @throws IllegalArgumentException if {@code s} is null
     */
    public static OSTreadState getByDescription(String s) {
        if (s == null) {
            throw new IllegalArgumentException("description must not be null");
        }
        for (OSTreadState state : VALUES) {
            if (s.startsWith(state.description)) {
                return state;
            }
        }
        return UNKNOWN;
    }
}
| 6,651 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/enums/JavaThreadState.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda.enums;
/**
 * {@code java.lang.Thread.State} values (with sub-states) as printed on the
 * {@code java.lang.Thread.State:} line of a thread dump.
 */
public enum JavaThreadState {
    NEW("NEW"),
    RUNNABLE("RUNNABLE"),
    SLEEPING("TIMED_WAITING (sleeping)"),
    IN_OBJECT_WAIT("WAITING (on object monitor)"),
    IN_OBJECT_WAIT_TIMED("TIMED_WAITING (on object monitor)"),
    PARKED("WAITING (parking)"),
    PARKED_TIMED("TIMED_WAITING (parking)"),
    BLOCKED_ON_MONITOR_ENTER("BLOCKED (on object monitor)"),
    TERMINATED("TERMINATED"),
    UNKNOWN("UNKNOWN");

    // values() clones the backing array on every call; cache one copy for
    // the lookup loop below.
    private static final JavaThreadState[] VALUES = values();

    public static final int COUNT = VALUES.length;

    // the state text as it appears in the dump
    private final String description;

    JavaThreadState(String description) {
        this.description = description;
    }

    /**
     * Maps the textual state of a dump line to a constant.
     *
     * @param s the state text; matching is prefix-based, first match wins
     * @return the first constant whose description is a prefix of {@code s},
     *         or {@link #UNKNOWN} if none matches
     * @throws IllegalArgumentException if {@code s} is null
     */
    public static JavaThreadState getByDescription(String s) {
        if (s == null) {
            throw new IllegalArgumentException("description must not be null");
        }
        for (JavaThreadState state : VALUES) {
            if (s.startsWith(state.description)) {
                return state;
            }
        }
        return UNKNOWN;
    }
}
| 6,652 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/enums/MonitorState.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda.enums;
/**
 * The monitor-related line kinds that can appear under a stack frame,
 * identified by the literal prefix each one carries in the dump.
 * Note: WAITING_ON and WAITING_ON_NO_OBJECT_REFERENCE_AVAILABLE share the
 * same "- waiting on" prefix; the latter is distinguished by the line's
 * content, not by its prefix.
 */
public enum MonitorState {
    WAITING_ON("- waiting on"),
    WAITING_TO_RE_LOCK("- waiting to re-lock"),
    WAITING_ON_NO_OBJECT_REFERENCE_AVAILABLE("- waiting on"),
    PARKING("- parking"),
    WAITING_ON_CLASS_INITIALIZATION("- waiting on the Class initialization monitor"),
    LOCKED("- locked"),
    WAITING_TO_LOCK("- waiting to lock"),
    ELIMINATED_SCALAR_REPLACED("- eliminated <owner is scalar replaced>"),
    ELIMINATED("- eliminated");

    // the literal prefix of the dump line for this state
    private final String prefix;

    MonitorState(String prefix) {
        this.prefix = prefix;
    }

    /** Returns the literal dump-line prefix for this state. */
    public String prefix() {
        return prefix;
    }
}
| 6,653 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/enums/SourceType.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda.enums;
/**
 * Classification of the source-location text printed in the parenthesized
 * part of a stack frame line.
 */
public enum SourceType {
    REDEFINED,
    NATIVE_METHOD,
    SOURCE_FILE,
    SOURCE_FILE_WITH_LINE_NUMBER,
    UNKNOWN_SOURCE;

    /**
     * Classifies the source text of a frame, e.g. {@code "Foo.java:42"},
     * {@code "Native Method"} or {@code "Unknown Source"}.
     */
    public static SourceType judge(String source) {
        // exact markers first: none of them contains ':' or ends with ".java",
        // so this ordering is equivalent to checking for ':' up front
        switch (source) {
            case "Redefined":
                return REDEFINED;
            case "Native Method":
                return NATIVE_METHOD;
            case "Unknown Source":
                return UNKNOWN_SOURCE;
            default:
                break;
        }
        if (source.contains(":")) {
            return SOURCE_FILE_WITH_LINE_NUMBER;
        }
        // anything else (with or without a ".java" suffix) is a plain source file
        return SOURCE_FILE;
    }
}
| 6,654 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/parser/JStackParser.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda.parser;
import org.apache.commons.lang.StringUtils;
import org.eclipse.jifa.common.listener.ProgressListener;
import org.eclipse.jifa.tda.enums.JavaThreadState;
import org.eclipse.jifa.tda.enums.MonitorState;
import org.eclipse.jifa.tda.enums.OSTreadState;
import org.eclipse.jifa.tda.enums.SourceType;
import org.eclipse.jifa.tda.enums.ThreadType;
import org.eclipse.jifa.tda.model.ConcurrentLock;
import org.eclipse.jifa.tda.model.Frame;
import org.eclipse.jifa.tda.model.JavaThread;
import org.eclipse.jifa.tda.model.Monitor;
import org.eclipse.jifa.tda.model.Pool;
import org.eclipse.jifa.tda.model.RawMonitor;
import org.eclipse.jifa.tda.model.Snapshot;
import org.eclipse.jifa.tda.model.Thread;
import org.eclipse.jifa.tda.model.Trace;
import org.eclipse.jifa.tda.util.Converter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.lang.reflect.Field;
import java.nio.file.Path;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.BlockingDeque;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class JStackParser implements Parser {
private static final Logger LOGGER = LoggerFactory.getLogger(JStackParser.class);
private static final BlockingDeque<ParserImpl.RawJavaThread> QUEUE;
private static final ExecutorService ES;
static {
    // bounded hand-off queue between the reader (main) thread and workers
    QUEUE = new LinkedBlockingDeque<>(128);
    // at least two workers so a single slow thread block cannot serialize parsing
    int count = Math.max(2, Runtime.getRuntime().availableProcessors());
    ES = Executors.newFixedThreadPool(count);
    assert count >= 2;
    for (int i = 0; i < count; i++) {
        ES.submit(() -> {
            //noinspection InfiniteLoopStatement
            while (true) {
                try {
                    // blocks until a raw thread block is enrolled
                    QUEUE.take().parse();
                } catch (Throwable t) {
                    LOGGER.error("Parse one thread error", t);
                }
            }
        });
    }
    // NOTE(review): the pool uses non-daemon threads and is never shut down —
    // presumably its lifetime is meant to match the JVM's; confirm.
}
/**
 * Parses the dump at {@code path} into a post-processed {@link Snapshot}.
 * Any failure is surfaced as a {@link ParserException}.
 */
@Override
public Snapshot parse(Path path, ProgressListener listener) {
    try {
        Snapshot result = new ParserImpl(path, listener).parse();
        result.post();
        return result;
    } catch (ParserException e) {
        // already the exception type callers expect; rethrow unchanged
        throw e;
    } catch (Throwable t) {
        throw new ParserException(t);
    }
}
/**
 * Regular expressions and literal markers used by the parser, loaded from
 * {@code jstack_pattern.properties} on the classpath. Field names must match
 * the property keys exactly: the fields are populated via reflection in the
 * static initializer below.
 */
static final class PATTERNS {
    static String TIME_FORMAT;
    static Pattern TIME;
    static Pattern VERSION;
    static String SMR_HEAD;
    static Pattern JAVA_THREAD;
    static Pattern JAVA_STATE;
    static Pattern JAVA_FRAME;
    static Pattern NO_JAVA_THREAD;
    static Pattern JNI_GLOBAL_REFS;
    static Pattern WAITING_ON;
    static Pattern WAITING_TO_RE_LOCK;
    static Pattern PARKING;
    static Pattern WAITING_ON_CLASS_INITIALIZATION;
    static Pattern LOCKED;
    static Pattern WAITING_TO_LOCK;
    static Pattern ELIMINATED_SCALAR_REPLACED;
    static Pattern ELIMINATED;
    static String LOCKED_OWNABLE_SYNCHRONIZERS;
    static String NONE;
    static Pattern LOCKED_SYNCHRONIZER;
    static String DEAD_LOCK_HEAD;
    static Pattern DEAD_LOCK_THREAD;
    static Pattern DEAD_LOCK_WAITING_TO_LOCK_MONITOR;
    static Pattern DEAD_LOCK_WAITING_TO_LOCK_SYNCHRONIZER;
    static Pattern DEAD_HELD_INFO;
    static String DEAD_LOCK_STACK_HEAD;
    static Pattern DEAD_FOUND;

    static {
        try {
            String fn = "jstack_pattern.properties";
            Properties ps = new Properties();
            ps.load(PATTERNS.class.getClassLoader().getResourceAsStream(fn));
            Field[] fields = PATTERNS.class.getDeclaredFields();
            for (Field field : fields) {
                String value = (String) ps.get(field.getName());
                if (value == null) {
                    // every declared field must have a backing property
                    throw new ParserException(field.getName() + " not found in " + fn);
                }
                Class<?> type = field.getType();
                if (type == Pattern.class) {
                    // reuse the value already read instead of a second map lookup
                    field.set(null, Pattern.compile(value));
                } else if (type == String.class) {
                    field.set(null, value);
                }
            }
        } catch (Throwable t) {
            if (t instanceof ParserException) {
                throw (ParserException) t;
            }
            throw new ParserException(t);
        }
    }

    /**
     * Returns the pattern matching the given fixed dump {@link ParserImpl.Element}.
     */
    static Pattern patternOf(ParserImpl.Element element) {
        switch (element) {
            case TIME:
                return TIME;
            case VERSION:
                return VERSION;
            case JNI_GLOBAL_REFS:
                return JNI_GLOBAL_REFS;
            case NON_JAVA_THREAD:
                return NO_JAVA_THREAD;
            default:
                throw new ParserException("Should not reach here");
        }
    }
}
private static class ParserImpl {
private final Input input;
private final AtomicInteger processing;
private final List<Throwable> errors;
private final Snapshot snapshot;
private final ProgressListener listener;
// Opens the dump file and positions the input at the first non-blank line.
ParserImpl(Path path, ProgressListener listener) throws IOException {
    this.input = new Input(path);
    this.listener = listener;
    snapshot = new Snapshot();
    snapshot.setPath(path.toAbsolutePath().toString());
    processing = new AtomicInteger(0);
    errors = new ArrayList<>();
    // must run last: advances the input to the first non-blank line
    step();
}
/**
 * Drives the whole parse: header (timestamp/version), the thread section
 * (Java threads are handed to the worker pool), JNI references and dead
 * locks — then blocks until all enrolled Java threads are processed.
 */
Snapshot parse() throws Exception {
    listener.beginTask("Parsing thread dump", 100);
    listener.subTask("Parsing timestamp and version");
    parseTimeStamp();
    parseVersion();
    listener.worked(1);
    skipSMR();
    // concurrent
    listener.subTask("Parsing threads");
    parseThreads();
    listener.subTask("Parsing JNI handles");
    parseJNIGlobalHandles();
    listener.worked(1);
    listener.subTask("Parsing JNI deadLocks");
    parseDeadLocks();
    listener.worked(8);
    // Wait for all Java threads to complete
    synchronized (this) {
        // guarded wait: done()/onParseRawThreadError notify when the
        // counter reaches zero
        while (processing.get() != 0) {
            this.wait();
        }
    }
    listener.worked(90);
    return snapshot;
}
// Advances the input to the next non-blank line (or EOF).
void step() throws IOException {
    for (String next = input.readLine(); next != null; next = input.readLine()) {
        if (StringUtils.isNotBlank(next)) {
            return;
        }
    }
}
// Skips the "Threads class SMR info" section, if the current line starts one:
// consumes lines up to and including the next blank line (or EOF).
void skipSMR() throws IOException {
    if (!PATTERNS.SMR_HEAD.equals(input.currentLine())) {
        return;
    }
    String line;
    do {
        line = input.readLine();
    } while (StringUtils.isNotBlank(line));
}
/**
 * Matches the current line against the pattern of {@code element} and, on
 * success, runs {@code action} with the matcher; the input is then advanced
 * past the line. On a failed match only a warning is logged, and the input
 * advances only when {@code stepOnFailed} is set.
 */
void parseByElementPattern(Element element, Action action, boolean stepOnFailed) throws Exception {
    String line = input.currentLine();
    if (line == null) {
        LOGGER.warn("Skip parsing {} caused by EOF", element.description);
        return;
    }
    Matcher matcher = PATTERNS.patternOf(element).matcher(line);
    if (!matcher.matches()) {
        LOGGER.warn("Parse {} failed: {}", element.description, line);
        if (stepOnFailed) {
            step();
        }
        return;
    }
    try {
        action.onMatched(matcher);
    } finally {
        // always move past the matched line, even if the action throws
        step();
    }
}
// Parses the first line of the dump (the capture timestamp).
void parseTimeStamp() throws Exception {
    parseByElementPattern(Element.TIME, matched -> {
        // SimpleDateFormat is not thread-safe, so a fresh local instance is used
        SimpleDateFormat format = new SimpleDateFormat(PATTERNS.TIME_FORMAT);
        snapshot.setTimestamp(format.parse(input.currentLine()).getTime());
    }, false);
}
// Parses the "Full thread dump ..." VM version line.
void parseVersion() throws Exception {
    parseByElementPattern(Element.VERSION, matched -> snapshot.setVmInfo(matched.group("info")), false);
}
// Parses the "JNI global ref(erence)s" line. Newer dumps report separate
// strong/weak counts; older ones report a single total.
void parseJNIGlobalHandles() throws Exception {
    parseByElementPattern(Element.JNI_GLOBAL_REFS, matched -> {
        String total = matched.group("all");
        if (total == null) {
            int strongRefs = Integer.parseInt(matched.group("strong"));
            int weakRefs = Integer.parseInt(matched.group("weak"));
            snapshot.setJniRefs(strongRefs + weakRefs);
            snapshot.setJniWeakRefs(weakRefs);
        } else {
            snapshot.setJniRefs(Integer.parseInt(total));
        }
    }, false);
}
/**
 * Parses the "Found one Java-level deadlock:" section(s) at the tail of the
 * dump: first the list of involved thread names with their wait/held lines,
 * then the per-thread stack section, and finally the trailing
 * "Found N deadlocks" summary which must agree with the parsed count.
 */
void parseDeadLocks() throws Exception {
    String line = input.currentLine();
    if (line == null) {
        // EOF — no deadlock section present
        return;
    }
    int dlCount = 0;
    while (line.equals(PATTERNS.DEAD_LOCK_HEAD)) {
        dlCount++;
        if (snapshot.getDeadLockThreads() == null) {
            snapshot.setDeadLockThreads(new ArrayList<>());
        }
        List<JavaThread> threads = new ArrayList<>();
        // skip ====
        input.readLine();
        step();
        line = input.currentLine();
        Matcher matcher;
        int tCount = 0;
        // one "name / waiting info / held info" triple per involved thread
        do {
            matcher = PATTERNS.DEAD_LOCK_THREAD.matcher(line);
            if (!matcher.matches()) {
                throw new ParserException("Illegal dead lock thread name");
            }
            JavaThread thread = new JavaThread();
            String name = matcher.group("name");
            threads.add(thread);
            tCount++;
            thread.setName(snapshot.getSymbols().add(name));
            thread.setType(ThreadType.JAVA);
            // wait and held info
            input.readLine();
            input.readLine();
            line = input.readLine();
        } while (line.startsWith("\""));
        step();
        line = input.currentLine();
        if (!line.equals(PATTERNS.DEAD_LOCK_STACK_HEAD)) {
            throw new ParserException("Illegal dead lock stack head");
        }
        // skip ====
        input.readLine();
        line = input.readLine();
        // one stack block per involved thread, in the same order as above
        for (int i = 0; i < tCount; i++) {
            matcher = PATTERNS.DEAD_LOCK_THREAD.matcher(line);
            if (!matcher.matches()) {
                throw new ParserException("Illegal dead lock thread name");
            }
            List<String> stackTraces = new ArrayList<>();
            while (true) {
                line = input.readLine();
                // stack lines run until the next thread header, a blank line,
                // EOF, or the "Found ..." summary
                if (line != null && !line.startsWith("\"") && !line.isBlank() && !line.startsWith("Found")) {
                    stackTraces.add(line);
                } else {
                    Trace trace = parseStackTrace(threads.get(i), true, stackTraces);
                    threads.get(i).setTrace(snapshot.getTraces().add(trace));
                    break;
                }
            }
        }
        snapshot.getDeadLockThreads().add(threads);
    }
    if (dlCount > 0) {
        step();
        line = input.currentLine();
        Matcher matcher = PATTERNS.DEAD_FOUND.matcher(line);
        if (!matcher.matches()) {
            throw new ParserException("Missing Dead lock found line");
        }
        // sanity check: the summary count must match the sections we parsed
        if (Integer.parseInt(matcher.group("count")) != dlCount) {
            throw new ParserException("Dead lock count mismatched");
        }
    }
}
/**
 * Hands a raw Java-thread block to the worker queue, bumping the
 * outstanding-work counter first. If enqueueing fails, the counter is
 * rolled back so {@link #parse()}'s wait loop cannot hang.
 */
void enroll(RawJavaThread tp) {
    processing.incrementAndGet();
    try {
        QUEUE.put(tp);
    } catch (InterruptedException e) {
        processing.decrementAndGet();
        // restore the interrupt status instead of silently swallowing it;
        // java.lang.Thread is fully qualified because the model Thread
        // class shadows it in this file
        java.lang.Thread.currentThread().interrupt();
    } catch (Throwable t) {
        processing.decrementAndGet();
    }
}
// Classifies a thread by its name. Java-level threads are JIT when they are
// compiler threads; VM threads are GC when the name carries a collector marker.
ThreadType typeOf(String name, boolean javaThread) {
    if (javaThread) {
        boolean compiler = name.startsWith("C1 CompilerThread")
            || name.startsWith("C2 CompilerThread");
        return compiler ? ThreadType.JIT : ThreadType.JAVA;
    }
    boolean collector = name.contains("GC")
        || name.contains("G1")
        || name.contains("CMS")
        || name.contains("Concurrent Mark-Sweep");
    return collector ? ThreadType.GC : ThreadType.VM;
}
// Fills the fields common to every thread header line (name, type, priorities,
// cpu/elapsed time, tid/nid and OS state) from an already-matched header.
void fillThread(Thread thread, Matcher m) {
    Pool<String> symbols = snapshot.getSymbols();
    String name = m.group("name");
    thread.setName(symbols.add(name));
    thread.setType(typeOf(name, thread instanceof JavaThread));
    thread.setOsPriority(Integer.parseInt(m.group("osPriority")));
    thread.setCpu(Converter.str2TimeMillis(m.group("cpu")));
    thread.setElapsed(Converter.str2TimeMillis(m.group("elapsed")));
    // tid/nid are printed as hex ("0x..."); Long.decode handles the prefix
    thread.setTid(Long.decode(m.group("tid")));
    thread.setNid(Long.decode(m.group("nid")));
    thread.setOsThreadState(OSTreadState.getByDescription(m.group("state").trim()));
}
/**
 * Parses the thread section. Java thread blocks (header starting with '"'
 * and ending with ']') are collected verbatim and enrolled for concurrent
 * parsing; the following non-Java (VM) threads are parsed inline, one
 * header line each.
 */
void parseThreads() throws Exception {
    String line = input.currentLine();
    do {
        while (StringUtils.isBlank(line)) {
            line = input.readLine();
            if (line == null) {
                return;
            }
        }
        if (line.startsWith("\"")) {
            if (!line.endsWith("]")) {
                // not a java thread
                break;
            }
            RawJavaThread rjt = new RawJavaThread();
            rjt.contents.add(line);
            rjt.lineStart = input.lineNumber();
            // collect the block's lines up to the next thread header
            while ((line = input.readLine()) != null) {
                if (StringUtils.isBlank(line)) {
                    continue;
                }
                if (line.startsWith("\"")) {
                    break;
                }
                if (line.startsWith(MonitorState.ELIMINATED_SCALAR_REPLACED.prefix())) {
                    // this problem is fixed by JDK-8268780(JDK 18)
                    // (the next frame line is fused onto the monitor line;
                    // split it back into two lines)
                    int index = line.indexOf(")");
                    if (index > 0 && line.length() > index + 1) {
                        rjt.contents.add(line.substring(0, index + 1));
                        rjt.contents.add(line.substring(index + 1).trim());
                        continue;
                    }
                }
                rjt.contents.add(line);
                rjt.lineEnd = input.lineNumber();
            }
            enroll(rjt);
        } else {
            break;
        }
    } while (true);
    // other threads
    do {
        while (StringUtils.isBlank(line)) {
            line = input.readLine();
            if (line == null) {
                return;
            }
        }
        if (line.startsWith("\"")) {
            parseByElementPattern(Element.NON_JAVA_THREAD, m -> {
                Thread thread = new Thread();
                fillThread(thread, m);
                // a VM thread occupies a single line
                thread.setLineStart(input.lineNumber());
                thread.setLineEnd(input.lineNumber());
                snapshot.getNonJavaThreads().add(thread);
            }, true);
        } else {
            break;
        }
        // step in parseByElementPattern
    } while ((line = input.currentLine()) != null);
}
// Marks one enrolled Java thread as finished; wakes the main parser thread
// when the last one completes.
void done() {
    int remain = processing.decrementAndGet();
    if (remain == 0) {
        synchronized (this) {
            // there is a single waiter (parse()), so notify() suffices
            this.notify();
        }
    }
}
/**
 * Records a parse error for later inspection. Uses the class logger rather
 * than {@code printStackTrace()} so errors go through the configured
 * logging backend.
 */
void recordError(Throwable t) {
    synchronized (this) {
        errors.add(t);
        LOGGER.error("Error while parsing thread dump", t);
    }
}
/**
 * Records an error from a worker thread and marks the failed raw thread as
 * finished. Delegates to {@link #done()} instead of duplicating its
 * decrement-and-notify logic.
 */
void onParseRawThreadError(Throwable t) {
    recordError(t);
    done();
}
// Guards monitor lines that must follow a frame line: fails fast when no
// frame has been seen yet.
void checkLastFrameNotNull(Frame last, String line) {
    if (last != null) {
        return;
    }
    throw new ParserException("Last frame doesn't exist: " + line);
}
/**
 * Interns a monitor (address/class/state) into the snapshot pools and,
 * when {@code needMap} is set (i.e. not re-parsing a dead-lock duplicate),
 * records which thread touches it. A LOCKED entry is suppressed when the
 * same thread already appears on the same raw monitor in a non-LOCKED
 * state, to avoid double-counting re-lock situations.
 */
Monitor assembleMonitor(Thread thread, boolean needMap, MonitorState state, long address,
boolean isClass, String clazz) {
    RawMonitor rm = new RawMonitor();
    rm.setAddress(address);
    rm.setClassInstance(isClass);
    rm.setClazz(clazz);
    // interning may return an existing equal instance
    rm = snapshot.getRawMonitors().add(rm);
    Monitor monitor = new Monitor();
    monitor.setRawMonitor(rm);
    monitor.setState(state);
    monitor = snapshot.getMonitors().add(monitor);
    if (needMap) {
        // the monitor->threads map is shared across worker threads
        synchronized (this) {
            boolean shouldMap = true;
            if (state == MonitorState.LOCKED) {
                Map<MonitorState, List<Thread>> map = snapshot.getMonitorThreads().get(rm.getId());
                if (map != null) {
                    for (Map.Entry<MonitorState, List<Thread>> entry : map.entrySet()) {
                        if (entry.getKey() != MonitorState.LOCKED && entry.getValue().contains(thread)) {
                            shouldMap = false;
                            break;
                        }
                    }
                }
            }
            if (shouldMap) {
                snapshot.getMonitorThreads()
                        .computeIfAbsent(rm.getId(), i -> new HashMap<>())
                        .computeIfAbsent(state, s -> new ArrayList<>())
                        .add(thread);
            }
        }
    }
    return monitor;
}
/**
 * Converts the raw stack lines of one thread into an interned {@link Trace}.
 * Frame lines start with "at"; every other line is a monitor/lock annotation
 * attached to the most recent frame, dispatched by its literal prefix.
 * <p>
 * Fixes relative to the previous version: the unused {@code monitorPool}
 * local is removed, and prefix dispatch now tests the more specific prefixes
 * first — "- waiting on the Class initialization monitor" before
 * "- waiting on", and "- eliminated &lt;owner is scalar replaced&gt;" before
 * "- eliminated" — so those lines are no longer shadowed by their shorter
 * prefix siblings.
 *
 * @param thread         the thread owning the stack
 * @param deadLockThread true when re-parsing the dead-lock section; monitor
 *                       ownership is then not recorded again
 * @param stackTraces    the raw lines, frame lines and annotations mixed
 * @return the assembled trace (frames and concurrent locks interned)
 */
Trace parseStackTrace(Thread thread, boolean deadLockThread, List<String> stackTraces) {
    Pool<String> symbolPool = snapshot.getSymbols();
    Pool<Frame> framePool = snapshot.getFrames();
    Trace trace = new Trace();
    List<Frame> frames = new ArrayList<>();
    List<Monitor> monitors = new ArrayList<>();
    Frame last = null;
    for (int i = 0; i < stackTraces.size(); i++) {
        Matcher m;
        String line = stackTraces.get(i);
        if (line.startsWith("at")) {
            m = PATTERNS.JAVA_FRAME.matcher(line);
            if (!m.matches()) {
                throw new ParserException("Illegal java frame: " + line);
            }
            // a new frame starts: flush collected monitors onto the previous
            // frame, then intern it
            if (!monitors.isEmpty()) {
                last.setMonitors(monitors.toArray(new Monitor[0]));
                monitors.clear();
            }
            if (last != null) {
                // add frame here since all related information has been processed
                frames.add(framePool.add(last));
            }
            last = new Frame();
            last.setClazz(symbolPool.add(m.group("class")));
            last.setMethod(symbolPool.add(m.group("method")));
            String module = m.group("module");
            if (module != null) {
                // strip '/'
                last.setModule(symbolPool.add(module.substring(0, module.length() - 1)));
            }
            String source = m.group("source");
            SourceType sourceType = SourceType.judge(source);
            last.setSourceType(sourceType);
            if (sourceType == SourceType.SOURCE_FILE_WITH_LINE_NUMBER) {
                int index = source.indexOf(":");
                last.setLine(Integer.parseInt(source.substring(index + 1)));
                source = source.substring(0, index);
                last.setSource(symbolPool.add(source));
            } else if (sourceType == SourceType.SOURCE_FILE) {
                last.setSource(symbolPool.add(source));
            }
        } else {
            if (line.startsWith(MonitorState.PARKING.prefix())) {
                assert last != null;
                m = PATTERNS.PARKING.matcher(line);
                if (!m.matches()) {
                    throw new ParserException("Illegal parking line: " + line);
                }
                monitors.add(assembleMonitor(thread, !deadLockThread, MonitorState.PARKING,
                        Long.decode(m.group("address")),
                        false, symbolPool.add(m.group("class"))));
            } else if (line.startsWith(MonitorState.WAITING_ON_CLASS_INITIALIZATION.prefix())) {
                // must be tested before WAITING_ON: this prefix also starts
                // with "- waiting on" and would otherwise be shadowed
                assert last != null;
                m = PATTERNS.WAITING_ON_CLASS_INITIALIZATION.matcher(line);
                if (!m.matches()) {
                    throw new ParserException(
                            "Illegal waiting on class initialization line: " + line);
                }
                monitors
                        .add(assembleMonitor(thread, !deadLockThread,
                                MonitorState.WAITING_ON_CLASS_INITIALIZATION,
                                -1, true, symbolPool.add(m.group("class"))));
            } else if (line.startsWith(MonitorState.WAITING_ON.prefix())) {
                assert last != null;
                if (line.contains("<no object reference available>")) {
                    monitors
                            .add(assembleMonitor(thread, !deadLockThread,
                                    MonitorState.WAITING_ON_NO_OBJECT_REFERENCE_AVAILABLE,
                                    -1, false, null));
                } else {
                    m = PATTERNS.WAITING_ON.matcher(line);
                    if (!m.matches()) {
                        throw new ParserException("Illegal waiting line: " + line);
                    }
                    monitors
                            .add(assembleMonitor(thread, !deadLockThread, MonitorState.WAITING_ON,
                                    Long.decode(m.group("address")),
                                    m.group("isClass") != null,
                                    symbolPool.add(m.group("class"))));
                }
            } else if (line.startsWith(MonitorState.WAITING_TO_RE_LOCK.prefix())) {
                assert last != null;
                m = PATTERNS.WAITING_TO_RE_LOCK.matcher(line);
                if (!m.matches()) {
                    throw new ParserException("Illegal waiting to re-lock line: " + line);
                }
                monitors
                        .add(assembleMonitor(thread, !deadLockThread, MonitorState.WAITING_TO_RE_LOCK,
                                Long.decode(m.group("address")),
                                m.group("isClass") != null,
                                symbolPool.add(m.group("class"))));
            } else if (line.startsWith(MonitorState.LOCKED.prefix())) {
                checkLastFrameNotNull(last, line);
                m = PATTERNS.LOCKED.matcher(line);
                if (!m.matches()) {
                    throw new ParserException("Illegal locked line: " + line);
                }
                monitors.add(assembleMonitor(thread, !deadLockThread, MonitorState.LOCKED,
                        Long.decode(m.group("address")),
                        m.group("isClass") != null,
                        symbolPool.add(m.group("class"))));
            } else if (line.startsWith(MonitorState.WAITING_TO_LOCK.prefix())) {
                checkLastFrameNotNull(last, line);
                m = PATTERNS.WAITING_TO_LOCK.matcher(line);
                if (!m.matches()) {
                    throw new ParserException("Illegal waiting to lock line: " + line);
                }
                monitors.add(assembleMonitor(thread, !deadLockThread, MonitorState.WAITING_TO_LOCK,
                        Long.decode(m.group("address")),
                        m.group("isClass") != null,
                        symbolPool.add(m.group("class"))));
            } else if (line.startsWith(MonitorState.ELIMINATED_SCALAR_REPLACED.prefix())) {
                // must be tested before ELIMINATED (shared "- eliminated" prefix)
                checkLastFrameNotNull(last, line);
                m = PATTERNS.ELIMINATED_SCALAR_REPLACED.matcher(line);
                if (!m.matches()) {
                    throw new ParserException(
                            "Illegal eliminated(scalar replaced) lock line: " + line);
                }
                monitors.add(assembleMonitor(thread, !deadLockThread,
                        MonitorState.ELIMINATED_SCALAR_REPLACED,
                        -1,
                        false,
                        symbolPool.add(m.group("class"))));
            } else if (line.startsWith(MonitorState.ELIMINATED.prefix())) {
                checkLastFrameNotNull(last, line);
                m = PATTERNS.ELIMINATED.matcher(line);
                if (!m.matches()) {
                    throw new ParserException("Illegal eliminated lock line: " + line);
                }
                monitors.add(assembleMonitor(thread, !deadLockThread, MonitorState.ELIMINATED,
                        Long.decode(m.group("address")),
                        m.group("isClass") != null,
                        symbolPool.add(m.group("class"))));
            } else if (line.equals(PATTERNS.LOCKED_OWNABLE_SYNCHRONIZERS)) {
                // concurrent locks
                int lockIndex = i + 1;
                line = stackTraces.get(lockIndex);
                if (PATTERNS.NONE.equals(line)) {
                    if (lockIndex + 1 != stackTraces.size()) {
                        throw new ParserException("Should not have content after: " + line);
                    }
                } else {
                    Pool<ConcurrentLock> concurrentPool = snapshot.getConcurrentLocks();
                    List<ConcurrentLock> concurrentLocks = new ArrayList<>();
                    do {
                        m = PATTERNS.LOCKED_SYNCHRONIZER.matcher(line);
                        if (!m.matches()) {
                            throw new ParserException("Illegal lock synchronizer line: " + line);
                        }
                        ConcurrentLock concurrentLock = new ConcurrentLock();
                        concurrentLock.setAddress(Long.decode(m.group("address")));
                        concurrentLock.setClazz(symbolPool.add(m.group("class")));
                        concurrentLocks.add(concurrentPool.add(concurrentLock));
                        if (++lockIndex < stackTraces.size()) {
                            line = stackTraces.get(lockIndex);
                        } else {
                            break;
                        }
                    } while (true);
                    trace.setConcurrentLocks(
                            concurrentLocks.toArray(new ConcurrentLock[0]));
                }
                // the synchronizer list is always the trailing section
                break;
            } else {
                throw new ParserException("Unrecognized line: " + line);
            }
        }
    }
    // flush the final frame
    if (last != null) {
        if (!monitors.isEmpty()) {
            last.setMonitors(monitors.toArray(new Monitor[0]));
        }
        frames.add(framePool.add(last));
    }
    trace.setFrames(frames.toArray(new Frame[0]));
    return trace;
}
/**
 * Worker-side parsing of one raw Java thread block: header line, the
 * java.lang.Thread.State line, then (for plain Java threads) the stack
 * trace. The thread is added to the snapshot under the instance lock, and
 * the outstanding-work counter is decremented on both success and failure.
 */
void parse(RawJavaThread rjt) {
    try {
        List<String> contents = rjt.contents;
        // at minimum: header line + state line
        assert contents.size() >= 2;
        String line = contents.get(0);
        Matcher m = PATTERNS.JAVA_THREAD.matcher(contents.get(0));
        if (!m.matches()) {
            throw new ParserException("Illegal java thread: " + line);
        }
        JavaThread thread = new JavaThread();
        fillThread(thread, m);
        thread.setLineStart(rjt.lineStart);
        thread.setLineEnd(rjt.lineEnd);
        thread.setJid(Long.parseLong(m.group("id")));
        thread.setDaemon(m.group("daemon") != null);
        thread.setPriority(Integer.parseInt(m.group("priority")));
        thread.setLastJavaSP(Long.decode(m.group("lastJavaSP")));
        // java thread state
        line = contents.get(1);
        m = PATTERNS.JAVA_STATE.matcher(line);
        if (!m.matches()) {
            throw new ParserException("Illegal java thread state: " + line);
        }
        thread.setJavaThreadState(JavaThreadState.getByDescription(m.group("state")));
        if (contents.size() > 2 && thread.getType() == ThreadType.JAVA /* skip jit */) {
            // trace
            Trace trace = parseStackTrace(thread, false, contents.subList(2, contents.size()));
            snapshot.getCallSiteTree().add(trace);
            thread.setTrace(snapshot.getTraces().add(trace));
        }
        // the list is shared with other worker threads
        synchronized (this) {
            snapshot.getJavaThreads().add(thread);
        }
        done();
    } catch (Throwable t) {
        onParseRawThreadError(t);
    }
}
// Fixed dump elements matched by a single regular expression each
// (see PATTERNS.patternOf).
enum Element {
    TIME("dump time"),
    VERSION("vm version"),
    JNI_GLOBAL_REFS("JNI global references"),
    NON_JAVA_THREAD("Non Java Thread");

    // human-readable name, used in warning messages
    private final String description;

    Element(String description) {
        this.description = description;
    }
}
// Callback invoked with the matcher of a successfully matched line.
interface Action {
    void onMatched(Matcher matcher) throws Exception;
}
/**
 * The raw text of one Java thread block (header plus stack lines) and its
 * line range in the dump file. Deliberately a non-static inner class:
 * {@link #parse()} delegates back to the owning ParserImpl, which is how a
 * worker thread finds its way to the right parser instance.
 */
class RawJavaThread {
    // the block's lines, header first
    private final List<String> contents;
    // first line number of the block in the dump file
    private int lineStart;
    // last line number of the block in the dump file
    private int lineEnd;

    public RawJavaThread() {
        contents = new ArrayList<>();
    }

    // invoked from a worker thread via the static QUEUE
    void parse() {
        ParserImpl.this.parse(this);
    }
}
}
}
| 6,655 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/parser/Parser.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda.parser;
import org.eclipse.jifa.common.listener.ProgressListener;
import org.eclipse.jifa.tda.model.Snapshot;
import java.nio.file.Path;
/**
 * Thread dump parser
 */
public interface Parser {

    /**
     * Generate a snapshot for the thread dump identified by path
     *
     * @param path     the path of thread dump
     * @param listener progress listener for parsing
     * @return the snapshot of thread dump
     * @throws ParserException the (unchecked) exception that occurred during
     *                         parsing
     */
    Snapshot parse(Path path, ProgressListener listener) throws ParserException;
}
| 6,656 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/parser/ParserFactory.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda.parser;
import java.nio.file.Path;
/**
 * Entry point for obtaining a thread-dump {@link Parser}.
 */
public class ParserFactory {

    // JStackParser wrapped with snapshot (de)serialization caching
    private static final Parser DEFAULT = new SerDesParser(new JStackParser());

    private ParserFactory() {
        // static factory; no instances
    }

    /**
     * Returns a parser for the dump at {@code path}.
     *
     * @param path the thread dump path; currently unused — the same default
     *             parser handles every dump
     * @return the shared default parser
     */
    public static Parser buildParser(Path path) {
        return DEFAULT;
    }
}
| 6,657 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/parser/ParserException.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda.parser;
/**
 * Unchecked exception thrown when a thread dump cannot be parsed.
 * Mirrors the four standard {@link RuntimeException} constructors.
 */
public class ParserException extends RuntimeException {

    public ParserException() {
    }

    public ParserException(String message) {
        super(message);
    }

    public ParserException(String message, Throwable cause) {
        super(message, cause);
    }

    public ParserException(Throwable cause) {
        super(cause);
    }
}
| 6,658 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/parser/SerDesParser.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda.parser;
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;
import org.eclipse.jifa.common.listener.ProgressListener;
import org.eclipse.jifa.tda.model.Snapshot;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
/**
 * A caching decorator around another {@link Parser}: parsed snapshots are
 * serialized with Kryo next to the dump file and reused on subsequent calls.
 */
public class SerDesParser implements Parser {

    private static final Logger LOGGER = LoggerFactory.getLogger(SerDesParser.class);

    // Kryo instances are not thread-safe, so each thread gets its own
    // pre-configured instance with Snapshot registered.
    private static final ThreadLocal<Kryo> KRYO;

    static {
        KRYO = ThreadLocal.withInitial(() -> {
            Kryo kryo = new Kryo();
            kryo.register(Snapshot.class);
            return kryo;
        });
    }

    // Underlying parser invoked on a cache miss.
    private final Parser parser;

    public SerDesParser(Parser parser) {
        this.parser = parser;
    }

    // Cache location: "<dump path>.kryo" next to the dump file.
    private Path storage(Path from) {
        return Paths.get(from.toFile().getAbsoluteFile() + ".kryo");
    }

    /**
     * Parses the dump at {@code path}, using the serialized snapshot cache when
     * available. On a cache miss or a failed deserialization the dump is parsed
     * from scratch and the fresh result is serialized for later calls.
     */
    @Override
    public Snapshot parse(Path path, ProgressListener listener) {
        // TODO: multi-threads support
        Path storage = storage(path);
        if (storage.toFile().exists()) {
            try {
                listener.beginTask("Deserializing thread dump snapshot", 100);
                Snapshot snapshot = deserialize(storage);
                listener.worked(100);
                return snapshot;
            } catch (Throwable t) {
                LOGGER.error("Deserialize thread dump snapshot failed", t);
                listener.sendUserMessage(ProgressListener.Level.WARNING, "Deserialize thread dump snapshot failed", t);
                listener.reset();
            }
        }
        listener.beginTask(null, 5);
        Snapshot snapshot = parser.parse(path, listener);
        try {
            serialize(snapshot, storage);
            listener.worked(5);
        } catch (Throwable t) {
            // Fix: the original log call dropped the throwable entirely.
            LOGGER.error("Serialize snapshot failed", t);
            // Remove any partially written cache file so the next run does not
            // waste time attempting to deserialize a corrupt snapshot.
            if (storage.toFile().exists() && !storage.toFile().delete()) {
                LOGGER.warn("Failed to delete partial snapshot cache: {}", storage);
            }
        }
        return snapshot;
    }

    // Writes the snapshot to the cache file; stream is closed by try-with-resources.
    private void serialize(Snapshot snapshot, Path path) throws FileNotFoundException {
        Kryo kryo = KRYO.get();
        try (Output out = new Output(new FileOutputStream(path.toFile()))) {
            kryo.writeObject(out, snapshot);
        }
    }

    // Reads a previously serialized snapshot back from the cache file.
    private Snapshot deserialize(Path path) throws IOException {
        Kryo kryo = KRYO.get();
        try (Input input = new Input(new FileInputStream(path.toFile()))) {
            return kryo.readObject(input, Snapshot.class);
        }
    }
}
| 6,659 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/parser/Input.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda.parser;
import java.io.Closeable;
import java.io.FileReader;
import java.io.IOException;
import java.io.LineNumberReader;
import java.nio.file.Path;
public class Input implements Closeable {
private final LineNumberReader lnr;
private String current;
public Input(Path dumpPath) throws IOException {
lnr = new LineNumberReader(new FileReader(dumpPath.toFile()));
}
public void mark() throws IOException {
lnr.mark(1024);
}
public void reset() throws IOException {
lnr.reset();
}
public int lineNumber() {
return lnr.getLineNumber();
}
public String readLine() throws IOException {
current = lnr.readLine();
if (current != null) {
current = current.trim();
}
return current;
}
public String currentLine() {
return current;
}
@Override
public void close() throws IOException {
lnr.close();
}
}
| 6,660 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/model/IdentityPool.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda.model;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * A {@link Pool} that additionally assigns each pooled object a unique id
 * (starting at 1) and tracks reference counts so {@link #freeze()} can order
 * objects by popularity.
 */
public class IdentityPool<O extends Identity> extends Pool<O> {

    // Monotonic id generator; ids start at 1 so 0 can mean "unassigned".
    private final AtomicInteger id;

    // Reference counts used to sort objects in freeze(); released afterwards.
    private ConcurrentHashMap<O, AtomicInteger> refCountMap;

    // Pooled objects sorted by descending reference count; set in freeze().
    private List<O> objects;

    public IdentityPool() {
        id = new AtomicInteger();
        refCountMap = new ConcurrentHashMap<>();
    }

    /**
     * Interns {@code o}: returns the canonical instance (assigning a fresh id on
     * first insertion) and increments its reference count.
     */
    @Override
    public O add(O o) {
        O pooled = map.computeIfAbsent(o, k -> {
            k.setId(nextId());
            return k;
        });
        refCountMap.computeIfAbsent(pooled, k -> new AtomicInteger(0)).incrementAndGet();
        return pooled;
    }

    private int nextId() {
        int id = this.id.incrementAndGet();
        assert id > 0;
        return id;
    }

    /** Snapshots the pooled objects sorted by descending ref count, then releases bookkeeping. */
    public void freeze() {
        objects = new ArrayList<>(map.values());
        // Integer.compare instead of subtraction: immune to (theoretical) overflow.
        objects.sort((k1, k2) -> Integer.compare(refCountMap.get(k2).get(), refCountMap.get(k1).get()));
        super.freeze();
        refCountMap = null;
    }

    /** Objects ordered by descending reference count; null before {@link #freeze()}. */
    public List<O> objects() {
        return objects;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o)
            return true;
        if (o == null || getClass() != o.getClass())
            return false;
        if (!super.equals(o))
            return false;
        IdentityPool<?> that = (IdentityPool<?>) o;
        return Objects.equals(id.get(), that.id.get()) && Objects.equals(refCountMap, that.refCountMap) &&
            Objects.equals(objects, that.objects);
    }

    @Override
    public int hashCode() {
        // Fix: hash the counter's value, not the AtomicInteger instance.
        // AtomicInteger uses identity hashCode, so two pools that compared
        // equal (via id.get()) could previously hash differently.
        return Objects.hash(super.hashCode(), id.get(), refCountMap, objects);
    }
}
| 6,661 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/model/CallSiteTree.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda.model;
import lombok.Data;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
@Data
/**
 * Aggregates stack traces into a weighted call-site tree. Each node's weight is
 * the number of traces passing through it. Children are kept sorted by their
 * frame's hashCode during construction (for binary insertion) and re-sorted by
 * weight in {@link #freeze()}.
 */
public class CallSiteTree {
    // Sentinel root (frame == null); its weight equals the number of traces added.
    private final Node root;
    // All child lists created so far, retained so freeze() can visit every node;
    // released (nulled) after freeze().
    private List<List<Node>> allChildren;
    // Total number of non-root nodes.
    private int count;
    // Dense id -> node lookup, built in freeze(); index 0 is the root.
    private Node[] id2Node;
    public CallSiteTree() {
        root = new Node();
        allChildren = new ArrayList<>();
    }
    /**
     * Adds one trace to the tree, incrementing the weight of each node on its
     * path and creating nodes as needed. Synchronized: construction may be
     * concurrent, and addChildren mutates shared lists.
     */
    public synchronized void add(Trace trace) {
        Frame[] frames = trace.getFrames();
        root.weight++;
        Node parent = root;
        for (Frame frame : frames) {
            parent = addChildren(parent, frame);
        }
    }
    /**
     * Finalizes the tree: assigns dense ids (heavier siblings first), builds the
     * id2Node index and releases allChildren. Must not be called concurrently
     * with add(), and add() must not be called afterwards (allChildren is null).
     */
    public void freeze() {
        id2Node = new Node[count + 1];
        id2Node[0] = root;
        int index = 1;
        for (List<Node> children : allChildren) {
            // Weights are non-negative trace counts, so this subtraction cannot
            // overflow in practice.
            children.sort((o1, o2) -> o2.weight - o1.weight);
            for (Node n : children) {
                n.setId(index);
                id2Node[index++] = n;
            }
        }
        assert index == count + 1;
        allChildren = null;
    }
    /**
     * Finds or creates the child of {@code parent} for {@code frame}, bumping its
     * weight when found. Children are kept ordered by Frame.hashCode so lookup is
     * a binary search.
     * NOTE(review): on a hashCode collision between unequal frames the binary
     * search can miss an existing equal frame and insert a duplicate — confirm
     * whether collisions are acceptable here.
     */
    private Node addChildren(Node parent, Frame frame) {
        List<Node> children = parent.children;
        if (children == null) {
            Node node = new Node(frame);
            count++;
            children = new ArrayList<>();
            children.add(node);
            parent.children = children;
            allChildren.add(children);
            return node;
        }
        int low = 0;
        int high = children.size() - 1;
        while (low <= high) {
            int mid = low + (high - low) / 2;
            Node node = children.get(mid);
            if (node.frame.equals(frame)) {
                node.weight++;
                return node;
            } else if (node.frame.hashCode() < frame.hashCode()) {
                low = mid + 1;
            } else {
                high = mid - 1;
            }
        }
        // Not found: insert at the computed position to keep hashCode order.
        Node node = new Node(frame);
        count++;
        children.add(low, node);
        return node;
    }
    @Override
    public String toString() {
        return "CallSiteTree{" +
            "root=" + root +
            ", allChildren=" + allChildren +
            '}';
    }
    @Override
    public boolean equals(Object o) {
        if (this == o)
            return true;
        if (o == null || getClass() != o.getClass())
            return false;
        CallSiteTree tree = (CallSiteTree) o;
        return count == tree.count && Objects.equals(root, tree.root) &&
            Objects.equals(allChildren, tree.allChildren) && Arrays.equals(id2Node, tree.id2Node);
    }
    @Override
    public int hashCode() {
        int result = Objects.hash(root, allChildren, count);
        result = 31 * result + Arrays.hashCode(id2Node);
        return result;
    }
    /**
     * One call site: a frame plus the number of traces passing through it and
     * its child call sites (null when it is a leaf).
     */
    @Data
    public static class Node extends Identity {
        Frame frame;
        int weight;
        List<Node> children;
        public Node(Frame frame) {
            this.frame = frame;
            weight = 1;
        }
        // Root-node constructor: no frame, weight counts added traces from zero.
        public Node() {
            frame = null;
            weight = 0;
        }
        @Override
        public boolean equals(Object o) {
            if (this == o)
                return true;
            if (o == null || getClass() != o.getClass())
                return false;
            Node node = (Node) o;
            return weight == node.weight && Objects.equals(frame, node.frame) &&
                Objects.equals(children, node.children);
        }
        @Override
        public int hashCode() {
            return Objects.hash(frame, weight, children);
        }
    }
}
| 6,662 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/model/Frame.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda.model;
import lombok.Data;
import org.eclipse.jifa.tda.enums.SourceType;
import java.util.Arrays;
import java.util.Objects;
@Data
/**
 * One stack frame of a thread's trace.
 */
public class Frame {
    // Fully qualified name of the declaring class.
    private String clazz;
    // Method name.
    private String method;
    // Module name if present in the dump; presumably null otherwise — TODO confirm.
    private String module;
    private SourceType sourceType;
    // Source file name; null when unknown. (The "-1 means unknown" convention
    // applies to the numeric 'line' field below, not to this String.)
    private String source;
    // Source line number; -1 means unknown.
    private int line = -1;
    // Monitors associated with this frame (locked / waiting) — may be null.
    private Monitor[] monitors;
    @Override
    public boolean equals(Object o) {
        if (this == o)
            return true;
        if (o == null || getClass() != o.getClass())
            return false;
        Frame frame = (Frame) o;
        return line == frame.line && Objects.equals(clazz, frame.clazz) &&
            Objects.equals(method, frame.method) && Objects.equals(module, frame.module) &&
            sourceType == frame.sourceType && Objects.equals(source, frame.source) &&
            Arrays.equals(monitors, frame.monitors);
    }
    @Override
    public int hashCode() {
        int result = Objects.hash(clazz, method, module, sourceType, source, line);
        result = 31 * result + Arrays.hashCode(monitors);
        return result;
    }
}
| 6,663 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/model/Pool.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda.model;
import lombok.Data;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
@Data
/**
 * An interning pool: {@link #add(Object)} returns the canonical instance for a
 * value, so structurally equal objects are shared. Backed by a concurrent map
 * until {@link #freeze()} releases it.
 */
public class Pool<O> {

    // Interning map: every key maps to the canonical instance of itself.
    Map<O, O> map;

    public Pool() {
        map = new ConcurrentHashMap<>();
    }

    /** Returns the pooled instance equal to {@code o}, inserting {@code o} if absent. */
    public O add(O o) {
        O existing = map.putIfAbsent(o, o);
        return existing == null ? o : existing;
    }

    /** Number of distinct pooled objects. */
    public int size() {
        return map.size();
    }

    /** Releases the interning map once pooling is complete. */
    public void freeze() {
        map = null;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        return Objects.equals(map, ((Pool<?>) o).map);
    }

    @Override
    public int hashCode() {
        return Objects.hash(map);
    }
} | 6,664 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/model/JavaThread.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda.model;
import lombok.Data;
import org.eclipse.jifa.tda.enums.JavaThreadState;
import java.util.Objects;
@Data
/**
 * A Java-level thread from the dump: extends the base {@link Thread} model with
 * JVM-specific attributes and the captured stack trace.
 */
public class JavaThread extends Thread {
    // Java thread id as reported in the dump — presumably the "#<n>" value; TODO confirm.
    private long jid;
    // Whether the thread is a daemon thread.
    private boolean daemon;
    // Java-level priority (as opposed to osPriority on the base class).
    private int priority;
    // NOTE(review): presumably the last Java stack pointer from the dump header — confirm.
    private long lastJavaSP;
    private JavaThreadState javaThreadState;
    // Captured stack trace; may be null — TODO confirm.
    private Trace trace;
    @Override
    public boolean equals(Object o) {
        if (this == o)
            return true;
        if (o == null || getClass() != o.getClass())
            return false;
        if (!super.equals(o))
            return false;
        JavaThread that = (JavaThread) o;
        return jid == that.jid && daemon == that.daemon && priority == that.priority && lastJavaSP == that.lastJavaSP &&
            javaThreadState == that.javaThreadState && Objects.equals(trace, that.trace);
    }
    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), jid, daemon, priority, lastJavaSP, javaThreadState, trace);
    }
}
| 6,665 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/model/Identity.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda.model;
import lombok.Data;
@Data
/**
 * Base class for pooled objects that carry a pool-assigned integer id.
 * The id is 0 until assigned (pools hand out ids starting at 1).
 */
public abstract class Identity {
    private int id;
    // equals/hashCode deliberately fail fast: subclasses are expected to supply
    // their own value-based implementations, and forgetting to do so should
    // surface immediately rather than fall back to identity semantics.
    @Override
    public boolean equals(Object o) {
        throw new UnsupportedOperationException();
    }
    @Override
    public int hashCode() {
        throw new UnsupportedOperationException();
    }
}
| 6,666 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/model/Snapshot.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda.model;
import lombok.Data;
import org.eclipse.jifa.common.Constant;
import org.eclipse.jifa.common.util.CollectionUtil;
import org.eclipse.jifa.tda.enums.MonitorState;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@Data
/**
 * The fully parsed model of one thread dump: interning pools used during
 * parsing, the thread lists, the call-site tree and derived groupings.
 * {@link #post()} finalizes the model and releases the temporary pools.
 */
public class Snapshot {
    // constant pools (used only during parsing; released in post())
    private Pool<String> symbols = new Pool<>();
    private Pool<Frame> frames = new Pool<>();
    private Pool<Trace> traces = new Pool<>();
    private IdentityPool<RawMonitor> rawMonitors = new IdentityPool<>();
    private Pool<Monitor> monitors = new Pool<>();
    private Pool<ConcurrentLock> concurrentLocks = new Pool<>();
    private String path;
    // -1 means unknown
    private long timestamp = -1L;
    private String vmInfo = Constant.UNKNOWN_STRING;
    private final List<JavaThread> javaThreads = new ArrayList<>();
    private final List<Thread> nonJavaThreads = new ArrayList<>();
    // Thread id -> thread, covering both lists; filled in post().
    private final Map<Integer, Thread> threadMap = new HashMap<>();
    private int nextThreadId = 1;
    private CallSiteTree callSiteTree = new CallSiteTree();
    // -1 means unknown
    private int jniRefs = -1;
    // -1 means unknown
    private int jniWeakRefs = -1;
    // Threads grouped by a shared name prefix (only groups with >1 member kept).
    private Map<String, List<Thread>> threadGroup;
    private Map<Integer, Map<MonitorState, List<Thread>>> monitorThreads = new HashMap<>();
    // dead locks
    private List<List<JavaThread>> deadLockThreads;
    // parse error
    private final List<Error> errors = new ArrayList<>();
    // Matches names like "pool-1-thread-3" or "worker#7"; group "prefix" is the group key.
    private static final Pattern GROUP_NAME_PATTERN = Pattern.compile("(?<prefix>.*)[-#]\\d+( .+)?");
    // Assigns sequential ids to all threads and derives name-prefix groups.
    private void assignThreadIdAndComputeThreadGroupInfo() {
        Map<String, List<Thread>> map = new HashMap<>();
        CollectionUtil.forEach(t -> {
            t.setId(nextThreadId++);
            threadMap.put(t.getId(), t);
            String name = t.getName();
            Matcher matcher = GROUP_NAME_PATTERN.matcher(name);
            if (matcher.matches()) {
                String prefix = matcher.group("prefix");
                map.computeIfAbsent(prefix, i -> new ArrayList<>()).add(t);
            }
        }, nonJavaThreads, javaThreads);
        threadGroup = new HashMap<>();
        for (Map.Entry<String, List<Thread>> entry : map.entrySet()) {
            if (entry.getValue().size() > 1) {
                threadGroup.put(entry.getKey(), entry.getValue());
            }
        }
    }
    /**
     * Finalizes the snapshot after parsing: releases the interning pools,
     * freezes the monitor pool and call-site tree, sorts threads by their
     * position in the dump and computes ids/groups.
     */
    public void post() {
        symbols = null;
        frames = null;
        traces = null;
        monitors = null;
        concurrentLocks = null;
        rawMonitors.freeze();
        callSiteTree.freeze();
        javaThreads.sort(Comparator.comparingInt(Thread::getLineStart));
        nonJavaThreads.sort(Comparator.comparingInt(Thread::getLineStart));
        assignThreadIdAndComputeThreadGroupInfo();
    }
    @Override
    public boolean equals(Object o) {
        if (this == o)
            return true;
        if (o == null || getClass() != o.getClass())
            return false;
        Snapshot snapshot = (Snapshot) o;
        return timestamp == snapshot.timestamp && nextThreadId == snapshot.nextThreadId &&
            jniRefs == snapshot.jniRefs &&
            jniWeakRefs == snapshot.jniWeakRefs && Objects.equals(symbols, snapshot.symbols) &&
            Objects.equals(frames, snapshot.frames) && Objects.equals(traces, snapshot.traces) &&
            Objects.equals(rawMonitors, snapshot.rawMonitors) &&
            Objects.equals(monitors, snapshot.monitors) &&
            Objects.equals(concurrentLocks, snapshot.concurrentLocks) &&
            Objects.equals(path, snapshot.path) && Objects.equals(vmInfo, snapshot.vmInfo) &&
            Objects.equals(javaThreads, snapshot.javaThreads) &&
            Objects.equals(nonJavaThreads, snapshot.nonJavaThreads) &&
            Objects.equals(threadMap, snapshot.threadMap) &&
            Objects.equals(callSiteTree, snapshot.callSiteTree) &&
            Objects.equals(threadGroup, snapshot.threadGroup) &&
            Objects.equals(monitorThreads, snapshot.monitorThreads) &&
            Objects.equals(deadLockThreads, snapshot.deadLockThreads) &&
            Objects.equals(errors, snapshot.errors);
    }
    @Override
    public int hashCode() {
        // Fix: this class declared equals without hashCode. Lombok's @Data skips
        // generating both when either is declared by hand, so the class was
        // silently using identity hashCode alongside value-based equals,
        // violating the equals/hashCode contract. Hash the same fields equals compares.
        return Objects.hash(timestamp, nextThreadId, jniRefs, jniWeakRefs, symbols, frames, traces,
            rawMonitors, monitors, concurrentLocks, path, vmInfo, javaThreads, nonJavaThreads,
            threadMap, callSiteTree, threadGroup, monitorThreads, deadLockThreads, errors);
    }
}
| 6,667 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/model/ConcurrentLock.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda.model;
import lombok.Data;
import java.util.Objects;
@Data
/**
 * A java.util.concurrent lock (e.g. AbstractQueuedSynchronizer) referenced by a
 * thread's "Locked ownable synchronizers" section: identified by its address
 * and class name.
 */
public class ConcurrentLock {

    private long address;

    private String clazz;

    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (o == null || o.getClass() != getClass()) {
            return false;
        }
        ConcurrentLock other = (ConcurrentLock) o;
        return other.address == address && Objects.equals(other.clazz, clazz);
    }

    @Override
    public int hashCode() {
        return Objects.hash(address, clazz);
    }
}
| 6,668 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/model/RawMonitor.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda.model;
import lombok.Data;
import java.util.Objects;
@Data
/**
 * A monitor object appearing in the dump, identified by address and class.
 * Pooled via IdentityPool, so it carries a pool-assigned id.
 */
public class RawMonitor extends Identity {
    // Object address from the dump; -1 means unknown.
    private long address = -1;
    // True when the monitor object is itself a java.lang.Class instance
    // (i.e. a lock on SomeClass.class) — presumably; TODO confirm against parser.
    private boolean classInstance;
    // Class name of the monitor object.
    private String clazz;
    @Override
    public boolean equals(Object o) {
        if (this == o)
            return true;
        if (o == null || getClass() != o.getClass())
            return false;
        RawMonitor that = (RawMonitor) o;
        return address == that.address && classInstance == that.classInstance &&
            Objects.equals(clazz, that.clazz);
    }
    @Override
    public int hashCode() {
        return Objects.hash(address, classInstance, clazz);
    }
}
| 6,669 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/model/Monitor.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda.model;
import lombok.Data;
import org.eclipse.jifa.tda.enums.MonitorState;
import java.util.Objects;
@Data
/**
 * A monitor reference at a specific stack frame: the underlying pooled
 * {@link RawMonitor} together with the state (locked, waiting, ...) the
 * owning frame has it in.
 */
public class Monitor {

    private RawMonitor rawMonitor;

    private MonitorState state;

    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (o == null || o.getClass() != getClass()) {
            return false;
        }
        Monitor other = (Monitor) o;
        return state == other.state && Objects.equals(rawMonitor, other.rawMonitor);
    }

    @Override
    public int hashCode() {
        return Objects.hash(rawMonitor, state);
    }
}
| 6,670 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/model/Thread.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda.model;
import lombok.Data;
import org.eclipse.jifa.tda.enums.OSTreadState;
import org.eclipse.jifa.tda.enums.ThreadType;
import java.util.Objects;
@Data
/**
 * Base model for any thread (Java or VM-internal) found in the dump.
 * Carries the attributes common to all thread header lines plus the line
 * range the thread occupies in the dump file.
 */
public class Thread extends Identity {
    private String name;
    private int osPriority;
    // ms, optional
    // -1 means unknown
    private double cpu = -1;
    // ms, optional
    // -1 means unknown
    private double elapsed = -1;
    // JVM-internal thread pointer ("tid=0x...").
    private long tid;
    // Native (OS) thread id ("nid=0x...").
    private long nid;
    private OSTreadState osThreadState;
    private ThreadType type;
    // First line of this thread's section in the dump.
    private int lineStart;
    // include
    private int lineEnd;
    // NOTE(review): equals/hashCode intentionally(?) omit the pool-assigned 'id'
    // from Identity — ids are assigned after parsing, so two structurally equal
    // threads compare equal regardless of id. Confirm this is deliberate.
    @Override
    public boolean equals(Object o) {
        if (this == o)
            return true;
        if (o == null || getClass() != o.getClass())
            return false;
        Thread thread = (Thread) o;
        return osPriority == thread.osPriority && Double.compare(thread.cpu, cpu) == 0 &&
            Double.compare(thread.elapsed, elapsed) == 0 && tid == thread.tid && nid == thread.nid &&
            lineStart == thread.lineStart && lineEnd == thread.lineEnd && Objects.equals(name, thread.name) &&
            osThreadState == thread.osThreadState && type == thread.type;
    }
    @Override
    public int hashCode() {
        return Objects
            .hash(name, osPriority, cpu, elapsed, tid, nid, osThreadState, type, lineStart, lineEnd);
    }
}
| 6,671 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/model/Error.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda.model;
import lombok.Data;
@Data
/**
 * A region of the dump file that could not be parsed, recorded so the UI can
 * surface it instead of failing the whole analysis.
 */
public class Error {
    // Human-readable description of what went wrong.
    private String detail;
    // First line of the offending region.
    private int lineStart;
    // Last line of the offending region, inclusive.
    private int lineEnd;
}
| 6,672 |
0 | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda | Create_ds/eclipse-jifa/backend/thread-dump-analyzer/src/main/java/org/eclipse/jifa/tda/model/Trace.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.tda.model;
import lombok.Data;
import java.util.Arrays;
@Data
/**
 * A thread's captured stack trace: its frames (top of stack first — confirm)
 * plus any concurrent locks the thread holds.
 */
public class Trace {

    private Frame[] frames;

    private ConcurrentLock[] concurrentLocks;

    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (o == null || o.getClass() != getClass()) {
            return false;
        }
        Trace other = (Trace) o;
        return Arrays.equals(frames, other.frames)
            && Arrays.equals(concurrentLocks, other.concurrentLocks);
    }

    @Override
    public int hashCode() {
        return 31 * Arrays.hashCode(frames) + Arrays.hashCode(concurrentLocks);
    }
}
| 6,673 |
0 | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common/ErrorCode.java | /********************************************************************************
* Copyright (c) 2020 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.common;
/**
 * Machine-readable error categories used across Jifa services.
 * Constant order is preserved (ordinals may be relied upon externally).
 */
public enum ErrorCode {
    SHOULD_NOT_REACH_HERE,
    UNKNOWN_ERROR,
    ILLEGAL_ARGUMENT,
    SANITY_CHECK,
    FILE_DOES_NOT_EXIST,
    FILE_HAS_BEEN_DELETED,
    TRANSFER_ERROR,
    NOT_TRANSFERRED,
    FILE_TYPE_MISMATCHED,
    HOST_IP_MISMATCHED,
    TRANSFERRING,
    UPLOADING,
    UPLOAD_TO_OSS_ERROR,
    /**
     * for master
     */
    DUMMY_ERROR_CODE,
    FORBIDDEN,
    PENDING_JOB,
    IMMEDIATE_JOB,
    JOB_DOES_NOT_EXIST,
    WORKER_DOES_NOT_EXIST,
    WORKER_DISABLED,
    PRIVATE_HOST_IP,
    REPEATED_USER_WORKER,
    SERVER_TOO_BUSY,
    UNSUPPORTED_OPERATION,
    FILE_IS_IN_USED,
    FILE_IS_BEING_DELETING,
    RETRY,
    RELEASE_PENDING_JOB,
    READINESS_PROBE_FAILURE;

    /**
     * Whether this error is fatal. Caller-side problems (bad arguments,
     * missing files) are recoverable; every other code is treated as fatal.
     */
    public boolean isFatal() {
        return this != ILLEGAL_ARGUMENT && this != FILE_DOES_NOT_EXIST;
    }
}
| 6,674 |
0 | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common/Constant.java | /********************************************************************************
* Copyright (c) 2020, 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.common;
import java.util.concurrent.TimeUnit;
/**
 * Shared string/numeric constants for the Jifa backend.
 * NOTE(review): constant-interface pattern (implemented for namespace access);
 * converting to a final class would break existing implementers, so kept as-is.
 */
public interface Constant {
    // HTTP header names
    String HEADER_CONTENT_TYPE_KEY = "Content-Type";
    String HEADER_CONTENT_LENGTH_KEY = "Content-Length";
    String HEADER_CONTENT_DISPOSITION = "Content-Disposition";
    String HEADER_AUTHORIZATION = "authorization";
    // Content types
    String CONTENT_TYPE_JSON_FORM = "application/json; charset=UTF-8";
    String CONTENT_TYPE_FILE_FORM = "application/octet-stream";
    // Auth token transport
    String COOKIE_AUTHORIZATION = "jifa-authorization";
    String HEADER_AUTHORIZATION_PREFIX = "Bearer ";
    // HTTP status codes
    int HTTP_GET_OK_STATUS_CODE = 200;
    int HTTP_POST_CREATED_STATUS_CODE = 201;
    int HTTP_BAD_REQUEST_STATUS_CODE = 400;
    // NOTE(review): duplicate of HTTP_POST_CREATED_STATUS_CODE — kept for compatibility.
    int HTTP_POST_CREATED_STATUS = 201;
    int HTTP_INTERNAL_SERVER_ERROR_STATUS_CODE = 500;
    String LINE_SEPARATOR = System.lineSeparator();
    String EMPTY_STRING = "";
    // Common request parameter names
    String FILE_TYPE = "type";
    String PAGE = "page";
    String PAGE_SIZE = "pageSize";
    String UNKNOWN_STRING = "UNKNOWN";
    // Default working directory: ~/jifa_workspace
    String DEFAULT_WORKSPACE = System.getProperty("user.home") + java.io.File.separator + "jifa_workspace";
    // Age after which cached artifacts are considered stale (6 hours, in ms).
    long STALE_THRESHOLD = TimeUnit.HOURS.toMillis(6);
}
| 6,675 |
0 | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common/JifaException.java | /********************************************************************************
* Copyright (c) 2020 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.common;
/**
 * Jifa's application-level runtime exception, carrying an {@link ErrorCode}
 * that classifies the failure in addition to the usual message/cause.
 */
public class JifaException extends RuntimeException {

    /** Classification of this failure; defaults to {@link ErrorCode#UNKNOWN_ERROR}. */
    private final ErrorCode code;

    /** Creates an exception classified as {@link ErrorCode#UNKNOWN_ERROR}. */
    public JifaException() {
        this(ErrorCode.UNKNOWN_ERROR);
    }

    /**
     * Creates an exception with the given detail message, classified as
     * {@link ErrorCode#UNKNOWN_ERROR}.
     */
    public JifaException(String detail) {
        this(ErrorCode.UNKNOWN_ERROR, detail);
    }

    /** Creates an exception whose detail message is the error code's name. */
    public JifaException(ErrorCode code) {
        this(code, code.name());
    }

    /**
     * Creates an exception with an explicit code and detail message.
     *
     * @param code   error classification
     * @param detail human-readable detail message
     */
    public JifaException(ErrorCode code, String detail) {
        super(detail);
        this.code = code;
    }

    /**
     * Wraps an arbitrary cause, classified as {@link ErrorCode#UNKNOWN_ERROR}.
     * The code is assigned explicitly (rather than via a field initializer)
     * so that the field can be final.
     */
    public JifaException(Throwable cause) {
        super(cause);
        this.code = ErrorCode.UNKNOWN_ERROR;
    }

    /** Wraps a cause with an explicit error code. */
    public JifaException(ErrorCode errorCode, Throwable cause) {
        super(cause);
        this.code = errorCode;
    }

    /** @return the error code classifying this failure. */
    public ErrorCode getCode() {
        return code;
    }
}
| 6,676 |
0 | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common/JifaHooks.java | /********************************************************************************
* Copyright (c) 2020 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.common;
import io.vertx.core.Vertx;
import io.vertx.core.http.HttpServerOptions;
import io.vertx.core.json.JsonObject;
import io.vertx.ext.web.Router;
import org.eclipse.jifa.common.enums.FileType;
/**
 * Extension hooks that let deployments customize Jifa's HTTP layer and file
 * layout without modifying core code. Every method has a no-op or
 * pass-through default, so implementors override only what they need.
 */
public interface JifaHooks {

    /**
     * Called with the server configuration at startup; invoked once per
     * verticle.
     */
    default void init(JsonObject config) {
    }

    /** Supplies the HTTP server options handed to vert.x. */
    default HttpServerOptions serverOptions(Vertx vertx) {
        return new HttpServerOptions();
    }

    /**
     * Invoked before Jifa's own routes are registered; useful for custom
     * redirects, authentication, etc.
     */
    default void beforeRoutes(Vertx vertx, Router router) {
    }

    /**
     * Invoked after Jifa's routes are registered; useful for custom error
     * handling, etc.
     */
    default void afterRoutes(Vertx vertx, Router router) {
    }

    /** Maps the directory path used for the given file type and name. */
    default String mapDirPath(FileType fileType, String name, String defaultPath) {
        return defaultPath;
    }

    /** Maps the path of a child file within a file-type directory. */
    default String mapFilePath(FileType fileType, String name, String childrenName, String defaultPath) {
        return defaultPath;
    }

    /** Maps the index path computed for the given file. */
    default String mapIndexPath(FileType fileType, String file, String defaultPath) {
        return defaultPath;
    }

    /**
     * Hooks implementation that applies no customization at all.
     * (The redundant {@code public} modifier was dropped: members of an
     * interface are implicitly public.)
     */
    class EmptyHooks implements JifaHooks {
        // relies entirely on the interface's default methods
    }
}
| 6,677 |
0 | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common/vo/FileInfo.java | /********************************************************************************
* Copyright (c) 2020 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.common.vo;
import lombok.Data;
import org.eclipse.jifa.common.enums.FileTransferState;
import org.eclipse.jifa.common.enums.FileType;
/**
 * Metadata describing one file managed by Jifa (an upload/transfer target or
 * analysis input).
 */
@Data
public class FileInfo {
    // Name the file had on the client side, before server-side renaming.
    private String originalName;
    // Server-side file name used to address the file in APIs.
    private String name;
    // File size — presumably in bytes; confirm against producers.
    private long size;
    // Kind of file (see FileType).
    private FileType type;
    // Current state of the (possibly finished) transfer.
    private FileTransferState transferState;
    // Whether clients are allowed to download this file.
    private boolean downloadable;
    // Creation timestamp — presumably epoch millis; confirm unit.
    private long creationTime;
}
| 6,678 |
0 | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common/vo/ErrorResult.java | /********************************************************************************
* Copyright (c) 2020 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.common.vo;
import io.vertx.serviceproxy.ServiceException;
import lombok.Data;
import org.eclipse.jifa.common.ErrorCode;
import org.eclipse.jifa.common.JifaException;
/**
 * Payload describing a failure to API clients: an {@link ErrorCode}
 * classification plus a human-readable message derived from the root cause.
 */
@Data
public class ErrorResult {

    private ErrorCode errorCode = ErrorCode.UNKNOWN_ERROR;
    private String message;

    /**
     * Derives the error code and message from an arbitrary throwable.
     *
     * @param t the failure to describe; never unwrapped for classification,
     *          but the message comes from its root cause
     */
    public ErrorResult(Throwable t) {
        // Classification: Jifa's own exceptions carry a code directly.
        if (t instanceof JifaException) {
            errorCode = ((JifaException) t).getCode();
        }
        // Vert.x service-proxy failures encode the ErrorCode ordinal.
        if (t instanceof ServiceException) {
            int failureCode = ((ServiceException) t).failureCode();
            if (failureCode >= 0 && failureCode < ErrorCode.values().length) {
                errorCode = ErrorCode.values()[failureCode];
            }
        }
        if (t instanceof IllegalArgumentException) {
            errorCode = ErrorCode.ILLEGAL_ARGUMENT;
        }
        // Walk down to the root cause; its message is the most informative.
        Throwable root = t;
        while (root.getCause() != null) {
            root = root.getCause();
        }
        message = root instanceof JifaException
                  ? root.getMessage()
                  : root.getClass().getSimpleName() + ": " + root.getMessage();
    }
}
| 6,679 |
0 | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common/vo/Progress.java | /********************************************************************************
* Copyright (c) 2020 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.common.vo;
import lombok.Data;
import org.eclipse.jifa.common.enums.ProgressState;
/**
 * Snapshot of a long-running task's progress, reported to clients.
 */
@Data
public class Progress {
    // Current lifecycle state of the task (see ProgressState).
    private ProgressState state;
    // Completion ratio; starts at 0. NOTE(review): presumably in [0, 1] — confirm producers.
    private double percent = 0;
    // Optional human-readable status or log text.
    private String message;
}
| 6,680 |
0 | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common/vo/DiskUsage.java | /********************************************************************************
* Copyright (c) 2020 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.common.vo;
import lombok.Data;
/**
 * Disk capacity and usage figures, both expressed in megabytes.
 */
@Data
public class DiskUsage {
    private long totalSpaceInMb;
    private long usedSpaceInMb;
    /** No-arg constructor for (de)serialization; both fields default to 0. */
    public DiskUsage() {
    }
    /**
     * @param totalSpaceInMb total disk space, in MB
     * @param usedSpaceInMb  used disk space, in MB
     */
    public DiskUsage(long totalSpaceInMb, long usedSpaceInMb) {
        this.totalSpaceInMb = totalSpaceInMb;
        this.usedSpaceInMb = usedSpaceInMb;
    }
}
| 6,681 |
0 | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common/vo/TransferProgress.java | /********************************************************************************
* Copyright (c) 2020 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.common.vo;
import lombok.Data;
import lombok.EqualsAndHashCode;
/**
 * Progress of a file transfer: generic task progress plus size counters.
 */
@Data
@EqualsAndHashCode(callSuper = true)
public class TransferProgress extends Progress {
    // Total expected size — presumably bytes; confirm against producers.
    private long totalSize;
    // Amount transferred so far, in the same unit as totalSize.
    private long transferredSize;
}
| 6,682 |
0 | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common/vo/LogFileInfo.java | /********************************************************************************
* Copyright (c) 2020 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.common.vo;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * Location of a log file within the Jifa deployment.
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
@Builder
public class LogFileInfo {
    // Whether the file belongs to the master node (name suggests master vs.
    // worker — TODO confirm against callers).
    private boolean belong2Master;
    // IP address of the node holding the file.
    private String ip;
    // Path of the log file on that node — presumably absolute; confirm.
    private String path;
    // Host name of the node holding the file.
    private String hostName;
}
| 6,683 |
0 | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common/vo/Result.java | /********************************************************************************
* Copyright (c) 2020 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.common.vo;
import lombok.Data;
/**
 * Generic wrapper used to serialize a single result value in API responses.
 *
 * @param <R> type of the wrapped value
 */
@Data
public class Result<R> {
    private R result;
    public Result(R result) {
        this.result = result;
    }
}
| 6,684 |
0 | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common/vo/PageView.java | /********************************************************************************
* Copyright (c) 2020, 2021 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.common.vo;
import lombok.Data;
import org.eclipse.jifa.common.request.PagingRequest;
import java.util.Collections;
import java.util.List;
/**
 * One page of results plus paging metadata, returned by list endpoints.
 *
 * @param <T> row type
 */
@Data
public class PageView<T> {
    // Shared sentinel for "no results".
    // NOTE(review): @Data generates setters, so this shared instance is
    // mutable; callers must never mutate it or every user of empty() is
    // affected — consider an immutable representation.
    public static final PageView<?> EMPTY = new PageView<>(null, 0, Collections.emptyList());
    // Safe because EMPTY holds no elements of any type.
    @SuppressWarnings("unchecked")
    public static <T> PageView<T> empty() {
        return (PageView<T>) EMPTY;
    }
    // Rows of the current page.
    private List<T> data;
    // Page index copied from the request (0 when the request is null).
    private int page;
    // Page size copied from the request (0 when the request is null).
    private int pageSize;
    // Total number of elements across all pages.
    private int totalSize;
    // NOTE(review): never assigned by the constructors; presumably set by
    // callers after applying a filter — confirm.
    private int filtered;
    /**
     * @param request   paging parameters; may be null (page/pageSize become 0)
     * @param totalSize total element count across all pages
     * @param data      rows of this page
     */
    public PageView(PagingRequest request, int totalSize, List<T> data) {
        this.data = data;
        this.page = request != null ? request.getPage() : 0;
        this.pageSize = request != null ? request.getPageSize() : 0;
        this.totalSize = totalSize;
    }
    /** No-arg constructor for (de)serialization. */
    public PageView() {
    }
}
| 6,685 |
0 | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common/vo/TransferringFile.java | /********************************************************************************
* Copyright (c) 2020 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.common.vo;
import lombok.Data;
/**
 * Minimal response body identifying a file whose transfer has been initiated.
 */
@Data
public class TransferringFile {
    // Server-side name assigned to the transferring file.
    private String name;
    public TransferringFile(String fileName) {
        this.name = fileName;
    }
}
| 6,686 |
0 | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common/vo | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common/vo/support/SearchType.java | /********************************************************************************
* Copyright (c) 2020, 2021 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.common.vo.support;
/**
 * Enumerates the record attributes a client may search on; interpreted by
 * {@code SearchPredicate} when building filters.
 */
public enum SearchType {
    /** String attribute: search text is treated as a regular expression. */
    BY_NAME,
    /** Numeric (long-valued) attribute: object count. */
    BY_OBJ_NUM,
    /** Numeric (long-valued) attribute: shallow size. */
    BY_SHALLOW_SIZE,
    /** Numeric (long-valued) attribute: retained size. */
    BY_RETAINED_SIZE,
    /** Fractional attribute compared against a user-entered percentage. */
    BY_PERCENT,
    /** Numeric (long-valued) attribute: class-loader count. */
    BY_CLASSLOADER_COUNT,
    /** String attribute: search text is treated as a regular expression. */
    BY_CONTEXT_CLASSLOADER_NAME
}
| 6,687 |
0 | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common/vo | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common/vo/support/SortTableGenerator.java | /********************************************************************************
* Copyright (c) 2020, 2021 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.common.vo.support;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;
/**
 * Small fluent helper for assembling a lookup table from sort-key names to
 * {@link Comparator} instances.
 *
 * @param <T> the row type being compared
 */
public class SortTableGenerator<T> {

    // Accumulated key -> comparator mappings; handed out as-is by build().
    private final Map<String, Comparator<T>> comparators = new HashMap<>();

    /** Returns the accumulated table (the live internal map, not a copy). */
    public Map<String, Comparator<T>> build() {
        return comparators;
    }

    /** Registers an explicit comparator under the given key. */
    public SortTableGenerator<T> add(String key, Comparator<T> comp) {
        comparators.put(key, comp);
        return this;
    }

    /** Registers a comparator derived from a key-extractor function. */
    public <U extends Comparable<? super U>> SortTableGenerator<T> add(String key, Function<T, ? extends U> val) {
        return add(key, Comparator.comparing(val));
    }
}
| 6,688 |
0 | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common/vo | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common/vo/support/SearchPredicate.java | /********************************************************************************
* Copyright (c) 2020, 2021 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.common.vo.support;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.function.Predicate;
import java.util.regex.Pattern;
/**
 * Builds {@link Predicate}s that filter {@link Searchable} records by a
 * free-form search text and a {@link SearchType}.
 *
 * <p>Name-based types (BY_NAME, BY_CONTEXT_CLASSLOADER_NAME) interpret the
 * text as a regular expression that must match the whole attribute value.
 * All other types expect an optional comparison operator (==, !=, &gt;=,
 * &lt;=, &gt;, &lt;) followed by a number; a bare number means equality.
 * For BY_PERCENT the number is entered as a percentage and compared against
 * a fractional stored value.
 *
 * <p>Malformed input never propagates an exception: it yields a predicate
 * that rejects every record.
 */
public class SearchPredicate {
    private static final Logger LOGGER = LoggerFactory.getLogger(SearchPredicate.class);
    /**
     * Creates the filtering predicate for the given text and type.
     *
     * @param searchText user-entered search expression; null/empty accepts all records
     * @param searchType attribute to search on; null accepts all records
     * @return a predicate that never throws (any failure evaluates to false)
     */
    public static <T extends Searchable> Predicate<T> createPredicate(String searchText, SearchType searchType) {
        // Nothing to filter on: accept every record.
        if (searchText == null || searchType == null || searchText.isEmpty()) {
            return (T record) -> true;
        }
        Predicate<T> pred;
        try {
            switch (searchType) {
                // String attributes: whole-string regex match.
                case BY_NAME:
                case BY_CONTEXT_CLASSLOADER_NAME: {
                    Pattern p = Pattern.compile(searchText);
                    pred = (T record) -> p.matcher(((String) record.getBySearchType(searchType))).matches();
                    break;
                }
                // Percentage attribute: the user types a percent (e.g. ">=50"),
                // while the stored value is a fraction — hence the /100.
                case BY_PERCENT: {
                    String prefix = extractPrefix(searchText);
                    double num = Double.parseDouble(extractNumberText(searchText)) / 100.0;
                    switch (prefix) {
                        case "==":
                            pred = (T record) -> Double.compare((double) record.getBySearchType(searchType), num) == 0;
                            break;
                        case ">=":
                            pred = (T record) -> (double) record.getBySearchType(searchType) >= num;
                            break;
                        case "<=":
                            pred = (T record) -> (double) record.getBySearchType(searchType) <= num;
                            break;
                        case ">":
                            pred = (T record) -> (double) record.getBySearchType(searchType) > num;
                            break;
                        case "<":
                            pred = (T record) -> (double) record.getBySearchType(searchType) < num;
                            break;
                        case "!=":
                            pred = (T record) -> Double.compare((double) record.getBySearchType(searchType), num) != 0;
                            break;
                        default:
                            // Unrecognized operator: match nothing.
                            pred = (T record) -> false;
                            break;
                    }
                    break;
                }
                // All remaining types are long-valued attributes.
                default: {
                    final String prefix = extractPrefix(searchText);
                    final long num = Long.parseLong(extractNumberText(searchText));
                    switch (prefix) {
                        case "==":
                            pred = (T record) -> (long) record.getBySearchType(searchType) == num;
                            break;
                        case ">=":
                            pred = (T record) -> (long) record.getBySearchType(searchType) >= num;
                            break;
                        case "<=":
                            pred = (T record) -> (long) record.getBySearchType(searchType) <= num;
                            break;
                        case ">":
                            pred = (T record) -> (long) record.getBySearchType(searchType) > num;
                            break;
                        case "<":
                            pred = (T record) -> (long) record.getBySearchType(searchType) < num;
                            break;
                        case "!=":
                            pred = (T record) -> (long) record.getBySearchType(searchType) != num;
                            break;
                        default:
                            // Unrecognized operator: match nothing.
                            pred = (T record) -> false;
                            break;
                    }
                    break;
                }
            }
        } catch (RuntimeException ignored) {
            // Bad regex, bad number, or too-short text: log at debug level and
            // fall back to a match-nothing predicate.
            LOGGER.debug("unexpected exception generating search `" + searchText + "` with type " + searchType.name());
            pred = (T record) -> false;
        }
        // wrap for error handling
        // Also guard each per-record evaluation (e.g. ClassCastException when
        // the attribute has an unexpected type): treat failures as no-match.
        final Predicate<T> unwrapped = pred;
        return (T record) -> {
            try {
                return unwrapped.test(record);
            } catch (Throwable ignored) {
                LOGGER.debug("unexpected exception when search `" + searchText + "` with type " + searchType.name());
                return false;
            }
        };
    }
    /**
     * Extracts the leading comparison operator; purely numeric text implies
     * equality ("==").
     * NOTE(review): assumes at least two characters when the text is not
     * numeric (charAt(1)); shorter input throws and is handled by the caller's
     * catch block, yielding a match-nothing predicate.
     */
    private static String extractPrefix(String text) {
        if (StringUtils.isNumeric(text)) {
            return "==";
        }
        String prefix = "";
        prefix += text.charAt(0);
        if (text.charAt(1) == '=') {
            prefix += "=";
            return prefix;
        }
        return prefix;
    }
    /**
     * Returns the numeric part following the operator. Only the first three
     * characters are scanned (operators are at most two characters); if no
     * digit is found, the empty string is returned, which makes the caller's
     * number parsing fail and results in a match-nothing predicate.
     * NOTE(review): a leading minus sign is skipped by the digit scan, so
     * negative numbers lose their sign here — confirm whether negative
     * thresholds are meant to be supported.
     */
    private static String extractNumberText(String text) {
        for (int i = 0; i < 3; i++) {
            if (Character.isDigit(text.charAt(i))) {
                return text.substring(i);
            }
        }
        return "";
    }
}
| 6,689 |
0 | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common/vo | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common/vo/support/Searchable.java | /********************************************************************************
* Copyright (c) 2020, 2021 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.common.vo.support;
public interface Searchable {
Object getBySearchType(SearchType type);
}
| 6,690 |
0 | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common/listener/ProgressListener.java | /********************************************************************************
* Copyright (c) 2021, 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.common.listener;
/**
 * Callback interface through which long-running operations report progress
 * and user-facing messages.
 */
public interface ProgressListener {
    /** Shared listener instance that ignores every notification. */
    ProgressListener NoOpProgressListener = new NoOpProgressListener();
    /**
     * Signals the start of a task.
     *
     * @param s task description
     * @param i amount of work this task contributes to the total
     */
    void beginTask(String s, int i);
    /** Reports the name of the current sub-task. */
    void subTask(String s);
    /** Reports that {@code i} units of work have been completed. */
    void worked(int i);
    /** Emits a message for the end user, optionally with an associated throwable. */
    void sendUserMessage(Level level, String s, Throwable throwable);
    /** Resets accumulated progress; the default implementation does nothing. */
    default void reset() {}
    /** @return the accumulated textual log of this listener. */
    String log();
    /** @return completion ratio of the tracked work. */
    double percent();
    /** Severity of a user message. */
    enum Level {
        INFO,
        WARNING,
        ERROR;
    }
}
| 6,691 |
0 | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common/listener/NoOpProgressListener.java | /********************************************************************************
* Copyright (c) 2021, 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.common.listener;
/**
 * {@link ProgressListener} that silently discards every notification;
 * exposed via {@link ProgressListener#NoOpProgressListener}.
 */
class NoOpProgressListener implements ProgressListener {
    @Override
    public void beginTask(String s, int i) {}
    @Override
    public void subTask(String s) {}
    @Override
    public void worked(int i) {}
    @Override
    public void sendUserMessage(Level level, String s, Throwable throwable) {}
    /** @return always null — callers must tolerate a missing log. */
    @Override
    public String log() {
        return null;
    }
    /** @return always 0. */
    @Override
    public double percent() {
        return 0;
    }
}
| 6,692 |
0 | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common/listener/DefaultProgressListener.java | /********************************************************************************
* Copyright (c) 2021, 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.common.listener;
import java.io.PrintWriter;
import java.io.StringWriter;
/**
 * {@link ProgressListener} that accumulates a textual log and tracks overall
 * completion as workedUnits / totalUnits.
 */
public class DefaultProgressListener implements ProgressListener {

    // StringBuffer (not StringBuilder) retained: appends may come from
    // multiple threads.
    private final StringBuffer log = new StringBuffer();
    private int total;
    private int done;
    private String lastSubTask;

    /** Appends one log line followed by the platform line separator. */
    private void append(String msg) {
        log.append(msg).append(System.lineSeparator());
    }

    @Override
    public void beginTask(String s, int i) {
        total += i;
        append(String.format("[Begin task] %s", s));
    }

    @Override
    public void subTask(String s) {
        // Log only transitions, so repeated identical sub-task reports do not
        // flood the log. (A null lastSubTask always counts as a transition.)
        boolean changed = lastSubTask == null || !lastSubTask.equals(s);
        if (changed) {
            lastSubTask = s;
            append(String.format("[Sub task] %s", s));
        }
    }

    @Override
    public void worked(int i) {
        done += i;
    }

    @Override
    public void sendUserMessage(Level level, String s, Throwable throwable) {
        StringWriter sw = new StringWriter();
        String tag;
        switch (level) {
            case INFO:
                tag = "[INFO] ";
                break;
            case WARNING:
                tag = "[WARNING] ";
                break;
            case ERROR:
                tag = "[ERROR] ";
                break;
            default:
                tag = "[UNKNOWN] ";
                break;
        }
        sw.append(tag).append(s);
        if (throwable != null) {
            // Attach the full stack trace after the message.
            sw.append(System.lineSeparator());
            throwable.printStackTrace(new PrintWriter(sw));
        }
        append(sw.toString());
    }

    @Override
    public String log() {
        return log.toString();
    }

    @Override
    public double percent() {
        // Guard against division by zero before any beginTask call.
        return total == 0 ? 0 : ((double) done) / total;
    }
}
| 6,693 |
0 | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common/cache/Handler.java | /********************************************************************************
* Copyright (c) 2021, 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.common.cache;
import net.sf.cglib.proxy.MethodInterceptor;
import net.sf.cglib.proxy.MethodProxy;
import org.eclipse.jifa.common.JifaException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.List;
/**
 * cglib {@link MethodInterceptor} that serves results of {@link Cacheable}
 * methods from a {@link Cache}, invoking the real implementation only on a
 * cache miss.
 */
class Handler implements MethodInterceptor {

    private final Cache cache;

    /** Methods of the target class annotated with {@link Cacheable}. */
    private final List<Method> cacheableMethods;

    /**
     * Scans {@code target} for {@link Cacheable} methods and validates that
     * each one can be proxied by cglib (non-abstract, non-final, and public
     * or protected).
     *
     * @throws JifaException if a cacheable method has an illegal modifier or
     *                       reflection fails
     */
    public Handler(Class<?> target) {
        cache = new Cache();
        cacheableMethods = new ArrayList<>();
        try {
            for (Method method : target.getDeclaredMethods()) {
                if (method.getAnnotation(Cacheable.class) == null) {
                    continue;
                }
                method.setAccessible(true);
                int mod = method.getModifiers();
                if (Modifier.isAbstract(mod) || Modifier.isFinal(mod) ||
                    !(Modifier.isPublic(mod) || Modifier.isProtected(mod))) {
                    throw new JifaException("Illegal method modifier: " + method);
                }
                cacheableMethods.add(method);
            }
        } catch (JifaException e) {
            // Bug fix: the validation failure above used to be caught by the
            // generic handler below and wrapped inside a second JifaException,
            // hiding the meaningful message. Rethrow it unchanged instead.
            throw e;
        } catch (Exception e) {
            throw new JifaException(e);
        }
    }

    /**
     * For cacheable methods, returns the cached value (computing and storing
     * it on first use); delegates directly to the superclass otherwise.
     */
    @Override
    public Object intercept(Object obj, Method method, Object[] args, MethodProxy proxy) throws Throwable {
        if (cacheableMethods.contains(method)) {
            return cache.load(new Cache.CacheKey(method, args),
                              () -> {
                                  try {
                                      return proxy.invokeSuper(obj, args);
                                  } catch (Throwable throwable) {
                                      // Propagate runtime exceptions as-is;
                                      // wrap everything else so the Callable
                                      // signature is satisfied.
                                      if (throwable instanceof RuntimeException) {
                                          throw (RuntimeException) throwable;
                                      }
                                      throw new JifaException(throwable);
                                  }
                              });
        }
        return proxy.invokeSuper(obj, args);
    }
}
| 6,694 |
0 | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common/cache/ProxyBuilder.java | /********************************************************************************
* Copyright (c) 2021, 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.common.cache;
import net.sf.cglib.proxy.Enhancer;
/**
 * Builds cglib proxies whose {@link Cacheable} methods are intercepted and
 * cached by a {@link Handler}.
 */
public class ProxyBuilder {

    /** Creates a caching proxy using the superclass's no-arg constructor. */
    @SuppressWarnings("unchecked")
    public static <T> T build(Class<T> clazz) {
        return (T) enhancerFor(clazz).create();
    }

    /** Creates a caching proxy using the matching superclass constructor. */
    @SuppressWarnings("unchecked")
    public static <T> T build(Class<T> clazz, Class<?>[] argTypes, Object[] args) {
        return (T) enhancerFor(clazz).create(argTypes, args);
    }

    // Configures an Enhancer subclassing clazz with a caching Handler callback.
    private static <T> Enhancer enhancerFor(Class<T> clazz) {
        Enhancer enhancer = new Enhancer();
        enhancer.setSuperclass(clazz);
        enhancer.setCallback(new Handler(clazz));
        return enhancer;
    }
}
| 6,695 |
0 | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common/cache/Cache.java | /********************************************************************************
* Copyright (c) 2021 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.common.cache;
import com.google.common.cache.CacheBuilder;
import org.eclipse.jifa.common.JifaException;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
/**
 * Thin wrapper around a Guava cache keyed by (method, arguments) pairs.
 * Values are held via soft references and expire 10 minutes after last
 * access; statistics recording is enabled.
 */
class Cache {

    private final com.google.common.cache.Cache<CacheKey, Object> cache;

    public Cache() {
        this.cache = CacheBuilder.newBuilder()
                                 .softValues()
                                 .recordStats()
                                 .expireAfterAccess(10, TimeUnit.MINUTES)
                                 .build();
    }

    /**
     * Returns the value for {@code key}, computing it with {@code loader} on
     * a miss.
     *
     * @throws JifaException wrapping any checked exception from the loader
     */
    @SuppressWarnings("unchecked")
    public <V> V load(CacheKey key, Callable<V> loader) {
        try {
            return (V) cache.get(key, loader);
        } catch (ExecutionException e) {
            throw new JifaException(e);
        }
    }

    /** Cache key: a reflective method plus its exact argument values. */
    static class CacheKey {
        Method method;
        Object[] args;

        CacheKey(Method method, Object[] args) {
            this.method = method;
            this.args = args;
        }

        @Override
        public boolean equals(Object o) {
            if (o == this) {
                return true;
            }
            if (o == null || o.getClass() != getClass()) {
                return false;
            }
            CacheKey that = (CacheKey) o;
            return method.equals(that.method) && Arrays.equals(args, that.args);
        }

        @Override
        public int hashCode() {
            // Keep the original mixing formula (multiply, then XOR with the
            // argument-array hash) so hash values stay unchanged.
            int hash = method.hashCode();
            return hash * 31 ^ Arrays.hashCode(args);
        }
    }
}
| 6,696 |
0 | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common/cache/Cacheable.java | /********************************************************************************
* Copyright (c) 2021 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.common.cache;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
 * Marks a method whose return value should be cached when its declaring class
 * is proxied via {@code ProxyBuilder}: the cglib {@code Handler} scans for
 * this annotation and serves repeat invocations from a {@code Cache} keyed by
 * (method, arguments).
 */
@Retention(value = RUNTIME)
@Target(ElementType.METHOD)
public @interface Cacheable {
}
| 6,697 |
0 | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common/util/PageViewBuilder.java | /********************************************************************************
* Copyright (c) 2020, 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.common.util;
import org.eclipse.jifa.common.request.PagingRequest;
import org.eclipse.jifa.common.vo.PageView;
import org.eclipse.jifa.common.vo.support.Searchable;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.List;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.IntFunction;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;
public class PageViewBuilder<A, B extends Searchable> {
// simple builder
public static <S, R> PageView<R> build(Callback<S> callback, PagingRequest paging, Function<S, R> mapper) {
List<R> result = IntStream.range(paging.from(), paging.to(callback.totalSize()))
.mapToObj(callback::get)
.map(mapper)
.collect(Collectors.toList());
return new PageView<>(paging, callback.totalSize(), result);
}
public static <R> PageView<R> build(Collection<R> total, PagingRequest paging) {
List<R> result = total.stream()
.skip(paging.from())
.limit(paging.getPageSize())
.collect(Collectors.toList());
return new PageView<>(paging, total.size(), result);
}
public static <S, R> PageView<R> build(Collection<S> total, PagingRequest paging, Function<S, R> mapper) {
List<R> result = total.stream()
.skip(paging.from())
.limit(paging.getPageSize())
.map(mapper)
.collect(Collectors.toList());
return new PageView<>(paging, total.size(), result);
}
public static <S, T, R> PageView<R> build(Collection<S> total, PagingRequest paging, Function<S, T> mapper1,
Function<T, R> mapper2,
Comparator<T> comparator) {
List<R> result = total.stream()
.map(mapper1)
.sorted(comparator)
.skip(paging.from())
.limit(paging.getPageSize())
.map(mapper2)
.collect(Collectors.toList());
return new PageView<>(paging, total.size(), result);
}
public static <R> PageView<R> build(int[] total, PagingRequest paging, IntFunction<R> mapper) {
List<R> result = Arrays.stream(total)
.skip(paging.from())
.limit(paging.getPageSize())
.mapToObj(mapper)
.collect(Collectors.toList());
return new PageView<>(paging, total.length, result);
}
public static <S, R> PageView<R> build(S[] total, PagingRequest paging, Function<S, R> mapper) {
List<R> result = Arrays.stream(total)
.skip(paging.from())
.limit(paging.getPageSize())
.map(mapper)
.collect(Collectors.toList());
return new PageView<>(paging, total.length, result);
}
public interface Callback<O> {
int totalSize();
O get(int index);
}
// complex builder
private List<A> list;
private Function<A, B> mapper;
private PagingRequest paging;
private Comparator<B> comparator;
private Predicate<B> filter;
private boolean noPagingNeeded;
private PageViewBuilder() {
}
public static <A, B extends Searchable> PageViewBuilder<A, B> fromList(List<A> list) {
PageViewBuilder<A, B> builder = new PageViewBuilder<>();
builder.list = list;
return builder;
}
public PageViewBuilder<A, B> beforeMap(Consumer<A> consumer) {
this.list.forEach(consumer);
return this;
}
public PageViewBuilder<A, B> map(Function<A, B> mapper) {
this.mapper = mapper;
return this;
}
public PageViewBuilder<A, B> sort(Comparator<B> mapper) {
this.comparator = mapper;
return this;
}
public PageViewBuilder<A, B> filter(Predicate<B> mapper) {
this.filter = mapper;
return this;
}
public PageViewBuilder<A, B> paging(PagingRequest paging) {
this.paging = paging;
return this;
}
public PageView<B> done() {
Stream<B> stream = list.stream().map(mapper);
if (filter != null) {
stream = stream.filter(filter);
}
if (comparator != null) {
stream = stream.sorted(comparator);
}
List<B> processedList = stream.collect(Collectors.toList());
// paging must be exist since this is PAGEVIEW builder.
List<B> finalList = processedList
.stream()
.skip(paging.from())
.limit(paging.getPageSize())
.collect(Collectors.toList());
return new PageView<>(paging,/*totalSize*/ processedList.size(), /*display list*/finalList);
}
}
| 6,698 |
0 | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common | Create_ds/eclipse-jifa/backend/common/src/main/java/org/eclipse/jifa/common/util/EscapeUtil.java | /********************************************************************************
* Copyright (c) 2022 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
********************************************************************************/
package org.eclipse.jifa.common.util;
import org.apache.commons.lang.StringEscapeUtils;
/**
 * Utility methods for unescaping Java-style escape sequences in display text
 * (labels that may contain literal {@code \}{@code uXXXX}, {@code \n}, etc.).
 *
 * <p>Stateless and thread-safe; all methods are static.
 */
public class EscapeUtil {

    private EscapeUtil() {
        // Static utility class: prevent accidental instantiation.
    }

    /**
     * Unescapes Java string escapes in {@code str} by delegating to
     * commons-lang {@link StringEscapeUtils#unescapeJava(String)}.
     *
     * @param str possibly escaped text
     * @return the unescaped text
     */
    public static String unescapeJava(String str) {
        return StringEscapeUtils.unescapeJava(str);
    }

    /**
     * Unescapes a label that may have been truncated with a trailing "...".
     *
     * <p>Truncation can cut a {@code \}{@code uXXXX} escape in half, and
     * unescaping such a fragment would fail; so for labels ending in "..."
     * everything from the last {@code \}{@code u} onward is dropped before
     * unescaping, and the ellipsis is re-appended. If the only escape starts
     * at position 0 (or there is none), the label is returned untouched.
     *
     * @param str label text, possibly truncated with "..."
     * @return the unescaped label
     */
    public static String unescapeLabel(String str) {
        if (str != null && str.endsWith("...")) {
            int index = str.lastIndexOf("\\u");
            if (index > 0) {
                return unescapeJava(str.substring(0, index)) + "...";
            }
            // No usable escape boundary: return as-is rather than risk a
            // malformed-escape failure from unescapeJava.
            return str;
        }
        return unescapeJava(str);
    }
}
| 6,699 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.