index int64 0 0 | repo_id stringlengths 26 205 | file_path stringlengths 51 246 | content stringlengths 8 433k | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen/HollowPOJOGenerator.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.isPrimitiveType;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.util.HollowWriteStateCreator;
import com.netflix.hollow.core.write.HollowWriteStateEngine;
import com.netflix.hollow.core.write.objectmapper.HollowObjectMapper;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
/**
* This class is used to generate java code which defines POJOs, which can in turn be used to populate a
* {@link HollowWriteStateEngine} via a {@link HollowObjectMapper}
*
* The generated java code is based on a data model (defined by a set of {@link HollowSchema}).
*
* You may also run the main() method directly.
*/
public class HollowPOJOGenerator {

    /**
     * An enumeration of possible arguments to the code generator when being called via the main
     * function. Not expected to be used outside the library itself, except for documentation
     * purposes.
     *
     * Unless otherwise noted, having repeated parameters results in the previous value being
     * overwritten.
     */
    public enum GeneratorArguments {
        /**
         * Add a class to the data model. Takes the fully qualified class name. This class must be
         * available on the classpath. Having multiple of this parameter results in multiple classes
         * being added to the data model.
         */
        addToDataModel,
        /**
         * Add schema from a schema file to the data model. The schema file must be available on the
         * classpath. Having multiple of this parameter results in multiple schemas being added to
         * the data model.
         */
        addSchemaFileToDataModel,
        /**
         * Sets the path the files will be generated in.
         */
        pathToGeneratedFiles,
        /**
         * Sets the package name for the generated files.
         */
        packageName,
        /**
         * Sets the suffix for the generated POJO class names.
         */
        pojoClassNameSuffix;
    }

    private final String packageName;          // package the generated POJO classes are declared in
    private final String pojoClassNameSuffix;  // suffix appended to each generated class name
    private final HollowDataset dataset;       // data model whose object schemas are turned into POJOs

    /**
     * @param packageName package the generated classes are declared in; must be non-null
     * @param pojoClassNameSuffix suffix appended to each generated POJO class name
     * @param dataset the data model (set of schemas) to generate POJOs for
     */
    public HollowPOJOGenerator(String packageName, String pojoClassNameSuffix, HollowDataset dataset) {
        this.packageName = packageName;
        this.pojoClassNameSuffix = pojoClassNameSuffix;
        this.dataset = dataset;
    }

    /**
     * Usage: java HollowPOJOGenerator --argName1=argValue1 --argName2=argValue2. See {@link GeneratorArguments}
     * for available arguments.
     * @param args the arguments
     * @throws IOException if the POJOs cannot be created
     * @throws ClassNotFoundException if the class for a data type cannot be loaded
     * @throws IllegalArgumentException if a required argument is missing or an argument is unknown
     */
    public static void main(String[] args) throws IOException, ClassNotFoundException {
        if (args.length == 0) {
            System.out.println("Usage:\n"
                    + "java " + HollowPOJOGenerator.class.getName() + " --arg1=value1 --arg2=value2\n"
                    + "see " + GeneratorArguments.class.getName() + " for available arguments.");
            return;
        }
        HollowWriteStateEngine engine = new HollowWriteStateEngine();
        String packageName = null;
        String pojoClassNameSuffix = null;
        String pathToGeneratedFiles = null;
        HollowObjectMapper mapper = new HollowObjectMapper(engine);
        // Diamond operator: the parser was previously constructed as a raw type.
        ArgumentParser<GeneratorArguments> argumentParser = new ArgumentParser<>(GeneratorArguments.class, args);
        for (ArgumentParser<GeneratorArguments>.ParsedArgument arg : argumentParser.getParsedArguments()) {
            switch (arg.getKey()) {
                case addToDataModel:
                    mapper.initializeTypeState(HollowPOJOGenerator.class.getClassLoader().loadClass(arg.getValue()));
                    break;
                case addSchemaFileToDataModel:
                    HollowWriteStateCreator.readSchemaFileIntoWriteState(arg.getValue(), engine);
                    break;
                case pathToGeneratedFiles:
                    pathToGeneratedFiles = arg.getValue();
                    break;
                case packageName:
                    packageName = arg.getValue();
                    break;
                case pojoClassNameSuffix:
                    pojoClassNameSuffix = arg.getValue();
                    break;
                default:
                    throw new IllegalArgumentException("Unhandled argument " + arg.getKey());
            }
        }
        // Fail fast with a clear message instead of an NPE deep inside generateFiles().
        if (packageName == null) {
            throw new IllegalArgumentException("Missing required argument --" + GeneratorArguments.packageName);
        }
        if (pathToGeneratedFiles == null) {
            throw new IllegalArgumentException("Missing required argument --" + GeneratorArguments.pathToGeneratedFiles);
        }
        new HollowPOJOGenerator(packageName, pojoClassNameSuffix, engine).generateFiles(pathToGeneratedFiles);
    }

    /**
     * Generates one POJO source file per object schema into the given directory.
     *
     * @param directory the destination directory path
     * @throws IOException if the directory cannot be created or a file cannot be written
     */
    public void generateFiles(String directory) throws IOException {
        generateFiles(new File(directory));
    }

    /**
     * Generates one POJO source file per object schema into the given directory. If the directory
     * does not already end with the package path (e.g. {@code com/acme/model}), the package
     * directories are appended and created as needed.
     *
     * @param directory the destination directory
     * @throws IOException if the directory cannot be created or a file cannot be written
     */
    public void generateFiles(File directory) throws IOException {
        Path destinationPath = directory.toPath();
        Path packagePath = Paths.get(packageName.replace(".", File.separator));
        // Only append the package directories if the caller didn't already point at them.
        if (!destinationPath.toAbsolutePath().endsWith(packagePath)) {
            destinationPath = destinationPath.resolve(packagePath);
        }
        directory = destinationPath.toFile();
        // Previously the mkdirs() result was silently ignored; surface the failure instead.
        if (!directory.exists() && !directory.mkdirs() && !directory.isDirectory()) {
            throw new IOException("Could not create output directory " + directory);
        }
        for (HollowSchema schema : dataset.getSchemas()) {
            // POJOs are only generated for object schemas; Hollow primitives map to built-in types.
            if (schema instanceof HollowObjectSchema && !isPrimitiveType(schema.getName())) {
                HollowPOJOClassGenerator generator = new HollowPOJOClassGenerator(dataset, (HollowObjectSchema) schema,
                        packageName, pojoClassNameSuffix);
                // try-with-resources: the writer previously leaked if generate()/write() threw.
                try (FileWriter writer = new FileWriter(new File(directory, generator.getClassName() + ".java"))) {
                    writer.write(generator.generate());
                }
            }
        }
    }
}
| 9,300 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen/HollowConsumerJavaFileGenerator.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.hollow.api.codegen;
import com.netflix.hollow.api.codegen.objects.HollowCollectionsGenerator;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.schema.HollowListSchema;
import com.netflix.hollow.core.schema.HollowMapSchema;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.schema.HollowSetSchema;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* Not intended for external consumption.
*
* @see HollowAPIGenerator
*
* @author dsu
*/
public abstract class HollowConsumerJavaFileGenerator implements HollowJavaFileGenerator {

    protected final String packageName;
    protected final String subPackageName;
    protected final CodeGeneratorConfig config;
    protected final HollowDataset dataset;

    protected String className;
    protected boolean useCollectionsImport = false;

    public HollowConsumerJavaFileGenerator(String packageName, String subPackageName, HollowDataset dataset,
            CodeGeneratorConfig config) {
        this.dataset = dataset;
        this.packageName = packageName;
        this.subPackageName = subPackageName;
        this.config = config;
    }

    /** Derives the Hollow implementation class name for a type, honouring the configured postfix and substitutions. */
    protected String hollowImplClassname(String typeName) {
        return HollowCodeGenerationUtils.hollowImplClassname(typeName, config.getClassPostfix(),
                config.isUseAggressiveSubstitutions(), config.isUseHollowPrimitiveTypes());
    }

    public String getSubPackageName() {
        return subPackageName;
    }

    @Override
    public final String getClassName() {
        return className;
    }

    /** Flags that the generated file should star-import the generated collections sub-package. */
    public void useCollectionsImport() {
        useCollectionsImport = true;
    }

    protected void appendPackageAndCommonImports(StringBuilder builder) {
        appendPackageAndCommonImports(builder, null, new ArrayList<>());
    }

    protected void appendPackageAndCommonImports(StringBuilder builder,
            String apiClassname) {
        appendPackageAndCommonImports(builder, apiClassname, new ArrayList<>());
    }

    /**
     * Appends the package declaration and the imports common to all generated consumer files:
     * hollow primitive types (if enabled) and, when package grouping is on, the API class,
     * any referenced generated types, and the core/collections sub-packages.
     */
    protected void appendPackageAndCommonImports(StringBuilder builder,
            String apiClassname, List<HollowSchema> schemasToImport) {
        String fullPackageName =
                createFullPackageName(packageName, subPackageName, config.isUsePackageGrouping());
        if (isEmpty(fullPackageName)) {
            return; // no package configured -> emit neither package line nor imports
        }
        builder.append("package ").append(fullPackageName).append(";\n\n");
        if (config.isUseHollowPrimitiveTypes()) {
            builder.append("import com.netflix.hollow.core.type.*;\n");
        }
        if (!config.isUsePackageGrouping()) {
            return;
        }
        if (apiClassname != null) {
            appendImportFromBasePackage(builder, apiClassname);
        }
        Set<String> referencedTypes = new HashSet<>();
        for (HollowSchema schema : schemasToImport) {
            collectReferencedTypes(referencedTypes, schema);
        }
        for (String typeName : referencedTypes) {
            appendImportFromBasePackage(builder, HollowCodeGenerationUtils.upperFirstChar(typeName) + config.getClassPostfix());
        }
        appendImportFromBasePackage(builder, "core.*");
        if (useCollectionsImport) {
            appendImportFromBasePackage(builder, HollowCollectionsGenerator.SUB_PACKAGE_NAME + ".*");
        }
        builder.append("\n");
    }

    /** Records the type name(s) referenced by a schema: its own name for objects, element/key/value types for collections. */
    private void collectReferencedTypes(Set<String> referencedTypes, HollowSchema schema) {
        switch (schema.getSchemaType()) {
            case OBJECT:
                addToSetIfNotPrimitiveOrCollection(referencedTypes, schema.getName());
                break;
            case SET:
                addToSetIfNotPrimitiveOrCollection(referencedTypes,
                        ((HollowSetSchema) schema).getElementType());
                break;
            case LIST:
                addToSetIfNotPrimitiveOrCollection(referencedTypes,
                        ((HollowListSchema) schema).getElementType());
                break;
            case MAP:
                HollowMapSchema mapSchema = (HollowMapSchema) schema;
                addToSetIfNotPrimitiveOrCollection(referencedTypes, mapSchema.getKeyType(),
                        mapSchema.getValueType());
                break;
            default:
                throw new IllegalArgumentException(
                        "Unexpected HollowSchema to import: " + schema);
        }
    }

    private String createFullPackageName(String packageName, String subPackageName, boolean usePackageGrouping) {
        boolean grouped = usePackageGrouping && !isEmpty(packageName) && !isEmpty(subPackageName);
        return grouped ? packageName + "." + subPackageName : packageName;
    }

    private boolean isEmpty(String value) {
        return value == null || value.trim().isEmpty();
    }

    private void appendImportFromBasePackage(StringBuilder builder, String leaf) {
        builder.append("import ").append(packageName).append(".").append(leaf).append(";\n");
    }

    /**
     * Adds each schema name to the set unless it is a Hollow primitive type or a collection
     * type (collection schemas are brought in by a star import).
     */
    private void addToSetIfNotPrimitiveOrCollection(Set<String> schemaNameSet, String... schemaNames) {
        for (String schemaName : schemaNames) {
            boolean isCollection = HollowCodeGenerationUtils.isCollectionType(schemaName, dataset);
            if (!isCollection && !HollowCodeGenerationUtils.isPrimitiveType(schemaName)) {
                schemaNameSet.add(schemaName);
            }
        }
    }
}
| 9,301 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen/HollowCodeGenerationUtils.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen;
import com.netflix.hollow.api.objects.delegate.HollowListCachedDelegate;
import com.netflix.hollow.api.objects.delegate.HollowListDelegate;
import com.netflix.hollow.api.objects.delegate.HollowListLookupDelegate;
import com.netflix.hollow.api.objects.delegate.HollowMapCachedDelegate;
import com.netflix.hollow.api.objects.delegate.HollowMapDelegate;
import com.netflix.hollow.api.objects.delegate.HollowMapLookupDelegate;
import com.netflix.hollow.api.objects.delegate.HollowSetCachedDelegate;
import com.netflix.hollow.api.objects.delegate.HollowSetDelegate;
import com.netflix.hollow.api.objects.delegate.HollowSetLookupDelegate;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.schema.HollowListSchema;
import com.netflix.hollow.core.schema.HollowMapSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.schema.HollowSetSchema;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
* A class containing convenience methods for the {@link HollowAPIGenerator}. Not intended for external consumption.
*/
/**
 * A class containing convenience methods for the {@link HollowAPIGenerator}. Not intended for external consumption.
 */
public class HollowCodeGenerationUtils {

    private static final Set<String> PRIMITIVE_TYPES = new HashSet<>();

    // Both substitution maps rename a generated class from a java.lang-clashing type name to
    // the same name prefixed with 'H' (e.g. "String" -> "HString").
    private static final Map<String, String> DEFAULT_CLASS_NAME_SUBSTITUTIONS = new HashMap<>();
    private static final Map<String, String> AGGRESSIVE_CLASS_NAME_SUBSTITUTIONS = new HashMap<>();

    static {
        for (Class<?> clzz : Arrays.asList(Boolean.class, Integer.class, Long.class, Float.class, Double.class, String.class)) {
            PRIMITIVE_TYPES.add(clzz.getSimpleName());
        }
        // Default substitutions: just the Hollow primitive wrapper types, plus Object.
        for (String name : new String[] {
                "String", "Integer", "Long", "Float", "Double", "Boolean", "Object"}) {
            DEFAULT_CLASS_NAME_SUBSTITUTIONS.put(name, "H" + name);
        }
        // Aggressive substitutions: all public java.lang types. Every entry maps to "H" + name,
        // so the table is generated from the name list instead of ~100 hand-written puts.
        for (String name : new String[] {
                "AbstractMethodError", "Appendable", "ArithmeticException",
                "ArrayIndexOutOfBoundsException", "ArrayStoreException", "AssertionError",
                "AutoCloseable", "Boolean", "BootstrapMethodError", "Byte", "CharSequence",
                "Character", "Class", "ClassCastException", "ClassCircularityError",
                "ClassFormatError", "ClassLoader", "ClassNotFoundException", "ClassValue",
                "CloneNotSupportedException", "Cloneable", "Comparable", "Compiler",
                "Deprecated", "Double", "Enum", "EnumConstantNotPresentException", "Error",
                "Exception", "ExceptionInInitializerError", "Float", "IllegalAccessError",
                "IllegalAccessException", "IllegalArgumentException",
                "IllegalMonitorStateException", "IllegalStateException",
                "IllegalThreadStateException", "IncompatibleClassChangeError",
                "IndexOutOfBoundsException", "InheritableThreadLocal", "InstantiationError",
                "InstantiationException", "Integer", "InternalError", "InterruptedException",
                "Iterable", "LinkageError", "Long", "Math", "NegativeArraySizeException",
                "NoClassDefFoundError", "NoSuchFieldError", "NoSuchFieldException",
                "NoSuchMethodError", "NoSuchMethodException", "NullPointerException", "Number",
                "NumberFormatException", "Object", "OutOfMemoryError", "Override", "Package",
                "Process", "ProcessBuilder", "Readable", "ReflectiveOperationException",
                "Runnable", "Runtime", "RuntimeException", "RuntimePermission", "SafeVarargs",
                "SecurityException", "SecurityManager", "Short", "StackOverflowError",
                "StackTraceElement", "StrictMath", "String", "StringBuffer", "StringBuilder",
                "StringIndexOutOfBoundsException", "SuppressWarnings", "System", "Thread",
                "ThreadDeath", "ThreadGroup", "ThreadLocal", "Throwable",
                "TypeNotPresentException", "UnknownError", "UnsatisfiedLinkError",
                "UnsupportedClassVersionError", "UnsupportedOperationException", "VerifyError",
                "VirtualMachineError", "Void"}) {
            AGGRESSIVE_CLASS_NAME_SUBSTITUTIONS.put(name, "H" + name);
        }
    }

    /** Returns the generated type-API class name for a type, e.g. {@code Movie -> MovieTypeAPI}. */
    public static String typeAPIClassname(String typeName) {
        return uppercase(typeName) + "TypeAPI";
    }

    /** Returns the generated factory class name for a type, e.g. {@code Movie -> MovieHollowFactory}. */
    public static String hollowFactoryClassname(String typeName) {
        return substituteInvalidChars(uppercase(typeName)) + "HollowFactory";
    }

    /** Returns the generated provider field name for a type, e.g. {@code Movie -> movieProvider}. */
    public static String hollowObjectProviderName(String typeName) {
        return substituteInvalidChars(lowercase(typeName)) + "Provider";
    }

    /**
     * Returns the generated Hollow implementation class name for a type, applying the configured
     * class postfix and, when enabled, the java.lang-clash substitutions above.
     */
    public static String hollowImplClassname(String typeName, String classPostfix,
            boolean useAggressiveSubstitutions, boolean useHollowPrimitives) {
        String classname = substituteInvalidChars(uppercase(typeName));
        if (!useHollowPrimitives && !"".equals(classPostfix)) {
            // skip substitutions here to preserve legacy behaviour
            return classname + classPostfix;
        }
        String sub = useAggressiveSubstitutions ?
            AGGRESSIVE_CLASS_NAME_SUBSTITUTIONS.get(classname) :
            DEFAULT_CLASS_NAME_SUBSTITUTIONS.get(classname);
        return sub == null ? classname + classPostfix : sub;
    }

    /** Returns the generated delegate interface name for an object type. */
    public static String delegateInterfaceName(String typeName) {
        return substituteInvalidChars(uppercase(typeName)) + "Delegate";
    }

    /** Returns the delegate interface name for a schema: generated for objects, built-in for collections. */
    public static String delegateInterfaceName(HollowSchema schema) {
        if(schema instanceof HollowObjectSchema)
            return delegateInterfaceName(schema.getName());
        if(schema instanceof HollowListSchema)
            return HollowListDelegate.class.getSimpleName();
        if(schema instanceof HollowSetSchema)
            return HollowSetDelegate.class.getSimpleName();
        if(schema instanceof HollowMapSchema)
            return HollowMapDelegate.class.getSimpleName();
        throw new UnsupportedOperationException("What kind of schema is a " + schema.getClass().getSimpleName() + "?");
    }

    /** Returns the generated cached-delegate class name for an object type. */
    public static String delegateCachedImplName(String typeName) {
        return substituteInvalidChars(uppercase(typeName)) + "DelegateCachedImpl";
    }

    /** Returns the cached-delegate class name for a schema: generated for objects, built-in for collections. */
    public static String delegateCachedClassname(HollowSchema schema) {
        if(schema instanceof HollowObjectSchema)
            return delegateCachedImplName(schema.getName());
        if(schema instanceof HollowListSchema)
            return HollowListCachedDelegate.class.getSimpleName();
        if(schema instanceof HollowSetSchema)
            return HollowSetCachedDelegate.class.getSimpleName();
        if(schema instanceof HollowMapSchema)
            return HollowMapCachedDelegate.class.getSimpleName();
        throw new UnsupportedOperationException("What kind of schema is a " + schema.getClass().getSimpleName() + "?");
    }

    /** Returns the generated lookup-delegate class name for an object type. */
    public static String delegateLookupImplName(String typeName) {
        return substituteInvalidChars(uppercase(typeName)) + "DelegateLookupImpl";
    }

    /** Returns the lookup-delegate class name for a schema: generated for objects, built-in for collections. */
    public static String delegateLookupClassname(HollowSchema schema) {
        if(schema instanceof HollowObjectSchema)
            return delegateLookupImplName(schema.getName());
        if(schema instanceof HollowListSchema)
            return HollowListLookupDelegate.class.getSimpleName();
        if(schema instanceof HollowSetSchema)
            return HollowSetLookupDelegate.class.getSimpleName();
        if(schema instanceof HollowMapSchema)
            return HollowMapLookupDelegate.class.getSimpleName();
        throw new UnsupportedOperationException("What kind of schema is a " + schema.getClass().getSimpleName() + "?");
    }

    /** Lower-cases the first character; returns null/empty input unchanged. */
    public static String lowercase(String str) {
        if(str == null || str.isEmpty())
            return str;
        return str.substring(0, 1).toLowerCase() + str.substring(1);
    }

    /** Alias for {@link #upperFirstChar(String)}. */
    public static String uppercase(String str) {
        return upperFirstChar(str);
    }

    /** Upper-cases the first character; returns null/empty input unchanged. */
    public static String upperFirstChar(String str) {
        if(str == null || str.isEmpty())
            return str;
        return str.substring(0, 1).toUpperCase() + str.substring(1);
    }

    /** Replaces characters that are invalid in java identifiers (spaces and dots) with underscores. */
    public static String substituteInvalidChars(String str) {
        str = str.replace(' ', '_');
        str = str.replace('.', '_');
        return str;
    }

    /**
     * Returns the boxed java type name for a hollow field type. REFERENCE fields are represented
     * by their ordinal, hence {@code Integer}.
     *
     * @throws IllegalArgumentException if the field type has no boxed representation
     */
    public static String getJavaBoxedType(FieldType fieldType) {
        switch(fieldType) {
            case BOOLEAN:
                return "Boolean";
            case BYTES:
                return "byte[]";
            case DOUBLE:
                return "Double";
            case FLOAT:
                return "Float";
            case LONG:
                return "Long";
            case INT:
            case REFERENCE:
                return "Integer";
            case STRING:
                return "String";
        }
        throw new IllegalArgumentException("Java boxed type is not known for FieldType." + fieldType.toString());
    }

    /**
     * Returns the scalar (primitive where possible) java type name for a hollow field type.
     *
     * @throws IllegalArgumentException if the field type has no scalar representation
     */
    public static String getJavaScalarType(FieldType fieldType) {
        switch(fieldType) {
            case BOOLEAN:
                return "boolean";
            case BYTES:
                return "byte[]";
            case DOUBLE:
                return "double";
            case FLOAT:
                return "float";
            case LONG:
                return "long";
            case INT:
            case REFERENCE:
                return "int";
            case STRING:
                return "String";
        }
        throw new IllegalArgumentException("Java scalar type is not known for FieldType." + fieldType.toString());
    }

    // Field-name prefixes that already read as a boolean accessor (isPrimary, hasStreams, ...).
    private static final Set<String> booleanMethodPrefixes = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(
        "is", "has", "do", "should", "was", "contains", "enable", "disable", "get")));

    public static Set<String> getBooleanMethodPrefixes() { return booleanMethodPrefixes; }

    /**
     * Rules: prepend "get" / "is" + upper case first char of field name
     *
     * boolean/Boolean field:
     * - has a boolean prefix (@see {@link #booleanMethodPrefixes}), just return it; otherwise, prepend "get" + upper case first char
     *
     * boolean isPrimary - isPrimary()
     * boolean hasStreams - hasStreams()
     * boolean playable - getPlayable()
     * boolean value - getValue()
     *
     * other field type: prepend "get" + upper case first char
     *
     * String title - getTitle()
     *
     * @param fieldName
     * name of field
     * @param clazz
     * type of field
     * @return accessor method name
     */
    public static String generateAccessorMethodName(String fieldName, Class<?> clazz) {
        String prefix = "get";
        if (boolean.class.equals(clazz) || Boolean.class.equals(clazz)) {
            for (String booleanPrefix : booleanMethodPrefixes) {
                // only treat the field name as a ready-made accessor if the prefix is followed
                // by an upper-case char ("isPrimary" yes, "island" no)
                if (fieldName.startsWith(booleanPrefix) && fieldName.length() > booleanPrefix.length()) {
                    char firstCharAfterBooleanPrefix = fieldName.charAt(booleanPrefix.length());
                    if (Character.isUpperCase(firstCharAfterBooleanPrefix)) {
                        return fieldName;
                    }
                }
            }
        }
        return substituteInvalidChars(prefix + uppercase(fieldName));
    }

    /**
     * @deprecated the name is misspelled; use {@link #generateAccessorMethodName(String, Class)}.
     * Retained (delegating) for backward compatibility with existing callers.
     */
    @Deprecated
    public static String generateAccessortMethodName(String fieldName, Class<?> clazz) {
        return generateAccessorMethodName(fieldName, clazz);
    }

    /**
     * Returns the accessor name for a boolean field: with boolean-field ergonomics the boolean
     * prefix rules above apply, otherwise a plain "get" accessor is produced.
     */
    public static String generateBooleanAccessorMethodName(String fieldName, boolean useBooleanFieldErgonomics) {
        return useBooleanFieldErgonomics ? generateAccessorMethodName(fieldName, boolean.class) : "get" + uppercase(fieldName);
    }

    /**
     * Convert field path into Param name
     *
     * Eg:
     * - Actor {@literal->} actor
     * - Actor.name {@literal->} actorName
     *
     * @param fieldPath the field path
     * @return the param name
     */
    public static String normalizeFieldPathToParamName(String fieldPath) {
        String result;
        if (fieldPath.contains(".")) {
            String[] parts = fieldPath.split("\\.");
            StringBuilder sb = new StringBuilder();
            sb.append(lowercase(parts[0]));
            for (int i = 1; i < parts.length; i++) {
                sb.append(uppercase(parts[i]));
            }
            result = sb.toString();
        } else {
            result = lowercase(fieldPath);
        }
        // strip a trailing "!" (field-path "auto-expand disabled" marker)
        if (result.endsWith("!")) {
            return result.substring(0, result.length() - 1);
        }
        return result;
    }

    /** Returns true if the type name is one of the Hollow primitive types (Boolean, Integer, Long, Float, Double, String). */
    public static boolean isPrimitiveType(String type) {
        return PRIMITIVE_TYPES.contains(type);
    }

    /** Returns the subset of the given schemas whose names are Hollow primitive types. */
    public static Set<String> getPrimitiveTypes(Collection<HollowSchema> schemaList) {
        Set<String> primitiveTypes = new HashSet<>();
        for (HollowSchema schema : schemaList) {
            String type = schema.getName();
            if (!isPrimitiveType(type)) continue;
            primitiveTypes.add(type);
        }
        return primitiveTypes;
    }

    /**
     * Returns true if the named schema is a collection (LIST/SET/MAP) rather than an OBJECT.
     * NOTE(review): assumes the schema exists in the dataset — a null from getSchema() would NPE here.
     */
    public static boolean isCollectionType(String schemaName, HollowDataset dataset) {
        return dataset.getSchema(schemaName).getSchemaType() != HollowSchema.SchemaType.OBJECT;
    }
}
| 9,302 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen/HollowJavaFileGenerator.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen;
/**
* Not intended for external consumption.
*
* @see HollowAPIGenerator
*
* @author dkoszewnik
*
*/
public interface HollowJavaFileGenerator {

    /** Returns the simple name of the class this generator produces. */
    String getClassName();

    /** Returns the complete java source of the generated class. */
    String generate();
}
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen/HollowAPIClassJavaGenerator.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.hollowFactoryClassname;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.hollowObjectProviderName;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.lowercase;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.typeAPIClassname;
import com.netflix.hollow.api.consumer.HollowConsumerAPI;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.api.objects.provider.HollowFactory;
import com.netflix.hollow.api.objects.provider.HollowObjectCacheProvider;
import com.netflix.hollow.api.objects.provider.HollowObjectFactoryProvider;
import com.netflix.hollow.api.objects.provider.HollowObjectProvider;
import com.netflix.hollow.api.sampling.HollowObjectCreationSampler;
import com.netflix.hollow.api.sampling.HollowSamplingDirector;
import com.netflix.hollow.api.sampling.SampleResult;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowListTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowMapTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowSetTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.missing.HollowListMissingDataAccess;
import com.netflix.hollow.core.read.dataaccess.missing.HollowMapMissingDataAccess;
import com.netflix.hollow.core.read.dataaccess.missing.HollowObjectMissingDataAccess;
import com.netflix.hollow.core.read.dataaccess.missing.HollowSetMissingDataAccess;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.schema.HollowSchemaSorter;
import com.netflix.hollow.core.util.AllHollowRecordCollection;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
/**
* This class contains template logic for generating a {@link HollowAPI} implementation. Not intended for external consumption.
*
* @see HollowAPIGenerator
*/
public class HollowAPIClassJavaGenerator extends HollowConsumerJavaFileGenerator {

    // Generated API classes are emitted directly into the root package (no sub-package).
    public static final String SUB_PACKAGE_NAME = "";

    // When true, per-type accessors are emitted with generic <T> signatures instead of
    // referencing the concrete generated class names.
    private final boolean parameterizeClassNames;

    public HollowAPIClassJavaGenerator(String packageName, String apiClassname, HollowDataset dataset, boolean parameterizeClassNames, CodeGeneratorConfig config) {
        super(packageName, SUB_PACKAGE_NAME, dataset, config);
        this.className = apiClassname;
        this.parameterizeClassNames = parameterizeClassNames;
    }

    /**
     * Emits the complete source of the generated {@link HollowAPI} subclass: imports, one
     * type-API field and one object-provider field per schema, constructors wiring data
     * access / caching / factory overrides, cache detachment, per-type accessors, and
     * object-creation sampling hooks.
     */
    @Override
    public String generate() {
        // Schemas are processed in dependency order so referenced types are wired up
        // before the types that reference them.
        List<HollowSchema> schemaList = HollowSchemaSorter.dependencyOrderedSchemaList(dataset);
        StringBuilder builder = new StringBuilder();
        appendPackageAndCommonImports(builder);

        // Imports required by the generated source file.
        builder.append("import ").append(Objects.class.getName()).append(";\n");
        builder.append("import ").append(Collection.class.getName()).append(";\n");
        builder.append("import ").append(Collections.class.getName()).append(";\n");
        builder.append("import ").append(Set.class.getName()).append(";\n");
        builder.append("import ").append(Map.class.getName()).append(";\n");
        builder.append("import ").append(HollowConsumerAPI.class.getName()).append(";\n");
        builder.append("import ").append(HollowAPI.class.getName()).append(";\n");
        builder.append("import ").append(HollowDataAccess.class.getName()).append(";\n");
        builder.append("import ").append(HollowTypeDataAccess.class.getName()).append(";\n");
        builder.append("import ").append(HollowObjectTypeDataAccess.class.getName()).append(";\n");
        builder.append("import ").append(HollowListTypeDataAccess.class.getName()).append(";\n");
        builder.append("import ").append(HollowSetTypeDataAccess.class.getName()).append(";\n");
        builder.append("import ").append(HollowMapTypeDataAccess.class.getName()).append(";\n");
        builder.append("import ").append(HollowObjectMissingDataAccess.class.getName()).append(";\n");
        builder.append("import ").append(HollowListMissingDataAccess.class.getName()).append(";\n");
        builder.append("import ").append(HollowSetMissingDataAccess.class.getName()).append(";\n");
        builder.append("import ").append(HollowMapMissingDataAccess.class.getName()).append(";\n");
        builder.append("import ").append(HollowFactory.class.getName()).append(";\n");
        builder.append("import ").append(HollowObjectProvider.class.getName()).append(";\n");
        builder.append("import ").append(HollowObjectCacheProvider.class.getName()).append(";\n");
        builder.append("import ").append(HollowObjectFactoryProvider.class.getName()).append(";\n");
        builder.append("import ").append(HollowObjectCreationSampler.class.getName()).append(";\n");
        builder.append("import ").append(HollowSamplingDirector.class.getName()).append(";\n");
        builder.append("import ").append(SampleResult.class.getName()).append(";\n");
        builder.append("import ").append(AllHollowRecordCollection.class.getName()).append(";\n");

        builder.append("\n@SuppressWarnings(\"all\")\n");
        builder.append("public class ").append(className).append(" extends HollowAPI ");

        // Optionally implement the HollowConsumerAPI primitive-type retriever interfaces
        // for each Hollow primitive type present in the dataset.
        Set<String> primitiveTypes = HollowCodeGenerationUtils.getPrimitiveTypes(schemaList); // Implement Primitive Type Retriever(s)
        if (config.isUseHollowPrimitiveTypes() && !primitiveTypes.isEmpty()) {
            builder.append("implements ");
            int itemCount = 0;
            for(String pType : primitiveTypes) {
                if (itemCount++ > 0) builder.append(",");
                builder.append(" HollowConsumerAPI.").append(HollowCodeGenerationUtils.upperFirstChar(pType)).append("Retriever");
            }
        }
        builder.append(" {\n\n");

        // One sampler shared across all types; one type-API field and one object-provider
        // field per schema.
        builder.append(" private final HollowObjectCreationSampler objectCreationSampler;\n\n");
        for (HollowSchema schema : schemaList) {
            builder.append(" private final " + typeAPIClassname(schema.getName())).append(" ").append(lowercase(typeAPIClassname(schema.getName()))).append(";\n");
        }
        builder.append("\n");
        for(HollowSchema schema : schemaList) {
            builder.append(" private final HollowObjectProvider ").append(hollowObjectProviderName(schema.getName())).append(";\n");
        }
        builder.append("\n");

        // Convenience constructors, each delegating to the fully-parameterized one.
        builder.append(" public ").append(className).append("(HollowDataAccess dataAccess) {\n");
        builder.append(" this(dataAccess, Collections.<String>emptySet());\n");
        builder.append(" }\n\n");
        builder.append(" public ").append(className).append("(HollowDataAccess dataAccess, Set<String> cachedTypes) {\n");
        builder.append(" this(dataAccess, cachedTypes, Collections.<String, HollowFactory<?>>emptyMap());\n");
        builder.append(" }\n\n");
        builder.append(" public ").append(className).append("(HollowDataAccess dataAccess, Set<String> cachedTypes, Map<String, HollowFactory<?>> factoryOverrides) {\n");
        builder.append(" this(dataAccess, cachedTypes, factoryOverrides, null);\n");
        builder.append(" }\n\n");

        // Primary constructor: registers a creation sampler for every type, then wires a
        // type API + object provider per schema (missing-data access when the type is not
        // in the dataset; cache provider when the type is in cachedTypes, carrying over
        // the previous cycle's cache when available).
        builder.append(" public ").append(className).append("(HollowDataAccess dataAccess, Set<String> cachedTypes, Map<String, HollowFactory<?>> factoryOverrides, ").append(className).append(" previousCycleAPI) {\n");
        builder.append(" super(dataAccess);\n");
        builder.append(" HollowTypeDataAccess typeDataAccess;\n");
        builder.append(" HollowFactory factory;\n\n");
        builder.append(" objectCreationSampler = new HollowObjectCreationSampler(");
        for(int i=0;i<schemaList.size();i++) {
            builder.append("\"").append(schemaList.get(i).getName()).append("\"");
            if(i < schemaList.size() - 1)
                builder.append(",");
        }
        builder.append(");\n\n");
        for (HollowSchema schema : schemaList) {
            builder.append(" typeDataAccess = dataAccess.getTypeDataAccess(\"").append(schema.getName()).append("\");\n");
            builder.append(" if(typeDataAccess != null) {\n");
            builder.append(" ").append(lowercase(typeAPIClassname(schema.getName()))).append(" = new ").append(typeAPIClassname(schema.getName())).append("(this, (Hollow").append(schemaType(schema)).append("TypeDataAccess)typeDataAccess);\n");
            builder.append(" } else {\n");
            builder.append(" ").append(lowercase(typeAPIClassname(schema.getName()))).append(" = new ").append(typeAPIClassname(schema.getName())).append("(this, new Hollow").append(schemaType(schema)).append("MissingDataAccess(dataAccess, \"").append(schema.getName()).append("\"));\n");
            builder.append(" }\n");
            builder.append(" addTypeAPI(").append(lowercase(typeAPIClassname(schema.getName()))).append(");\n");
            builder.append(" factory = factoryOverrides.get(\"").append(schema.getName()).append("\");\n");
            builder.append(" if(factory == null)\n");
            builder.append(" factory = new ").append(hollowFactoryClassname(schema.getName())).append("();\n");
            builder.append(" if(cachedTypes.contains(\"").append(schema.getName()).append("\")) {\n");
            builder.append(" HollowObjectCacheProvider previousCacheProvider = null;\n");
            builder.append(" if(previousCycleAPI != null && (previousCycleAPI.").append(hollowObjectProviderName(schema.getName())).append(" instanceof HollowObjectCacheProvider))\n");
            builder.append(" previousCacheProvider = (HollowObjectCacheProvider) previousCycleAPI.").append(hollowObjectProviderName(schema.getName())).append(";\n");
            builder.append(" ").append(hollowObjectProviderName(schema.getName())).append(" = new HollowObjectCacheProvider(typeDataAccess, ").append(lowercase(typeAPIClassname(schema.getName()))).append(", factory, previousCacheProvider);\n");
            builder.append(" } else {\n");
            builder.append(" ").append(hollowObjectProviderName(schema.getName())).append(" = new HollowObjectFactoryProvider(typeDataAccess, ").append(lowercase(typeAPIClassname(schema.getName()))).append(", factory);\n");
            builder.append(" }\n\n");
        }
        builder.append(" }\n\n");

        // detachCaches(): invalidates cached objects from a previous cycle.
        builder.append("/*\n * Cached objects are no longer accessible after this method is called and an attempt to access them will cause an IllegalStateException.\n */\n");
        builder.append(" public void detachCaches() {\n");
        for(HollowSchema schema : schemaList) {
            builder.append(" if(").append(hollowObjectProviderName(schema.getName())).append(" instanceof HollowObjectCacheProvider)\n");
            builder.append(" ((HollowObjectCacheProvider)").append(hollowObjectProviderName(schema.getName())).append(").detach();\n");
        }
        builder.append(" }\n\n");

        // One getter per generated type API.
        for (HollowSchema schema : schemaList) {
            builder.append(" public ").append(typeAPIClassname(schema.getName())).append(" get" + typeAPIClassname(schema.getName())).append("() {\n");
            builder.append(" return ").append(lowercase(typeAPIClassname(schema.getName()))).append(";\n");
            builder.append(" }\n");
        }

        // Per-type record accessors: getAllX() (collection over all ordinals) and
        // getX(int ordinal), either generically typed or concretely typed depending on
        // the parameterizeClassNames setting.
        for(int i=0;i<schemaList.size();i++) {
            HollowSchema schema = schemaList.get(i);
            if(parameterizeClassNames) {
                builder.append(" public <T> Collection<T> getAll").append(hollowImplClassname(schema.getName())).append("() {\n");
                builder.append(" HollowTypeDataAccess tda = Objects.requireNonNull(getDataAccess().getTypeDataAccess(\"").append(schema.getName()).append("\"), \"type not loaded or does not exist in dataset; type=").append(schema.getName()).append("\");\n");
                builder.append(" return new AllHollowRecordCollection<T>(tda.getTypeState()) {\n");
                builder.append(" protected T getForOrdinal(int ordinal) {\n");
                builder.append(" return get").append(hollowImplClassname(schema.getName())).append("(ordinal);\n");
                builder.append(" }\n");
                builder.append(" };\n");
                builder.append(" }\n");
                builder.append(" public <T> T get").append(hollowImplClassname(schema.getName())).append("(int ordinal) {\n");
                builder.append(" objectCreationSampler.recordCreation(").append(i).append(");\n");
                builder.append(" return (T) ").append(hollowObjectProviderName(schema.getName())).append(".getHollowObject(ordinal);\n");
                builder.append(" }\n");
            } else {
                String hollowImplClassname = hollowImplClassname(schema.getName());
                builder.append(" public Collection<"+hollowImplClassname+"> getAll").append(hollowImplClassname).append("() {\n");
                builder.append(" HollowTypeDataAccess tda = Objects.requireNonNull(getDataAccess().getTypeDataAccess(\"").append(schema.getName()).append("\"), \"type not loaded or does not exist in dataset; type=").append(schema.getName()).append("\");\n");
                builder.append(" return new AllHollowRecordCollection<"+hollowImplClassname+">(tda.getTypeState()) {\n");
                builder.append(" protected "+hollowImplClassname+" getForOrdinal(int ordinal) {\n");
                builder.append(" return get").append(hollowImplClassname).append("(ordinal);\n");
                builder.append(" }\n");
                builder.append(" };\n");
                builder.append(" }\n");
                builder.append(" public ").append(hollowImplClassname).append(" get").append(hollowImplClassname).append("(int ordinal) {\n");
                builder.append(" objectCreationSampler.recordCreation(").append(i).append(");\n");
                builder.append(" return (").append(hollowImplClassname).append(")").append(hollowObjectProviderName(schema.getName())).append(".getHollowObject(ordinal);\n");
                builder.append(" }\n");
            }
        }

        // Sampling hooks: forward the director to both the base API and the creation sampler.
        builder.append(" public void setSamplingDirector(HollowSamplingDirector director) {\n");
        builder.append(" super.setSamplingDirector(director);\n");
        builder.append(" objectCreationSampler.setSamplingDirector(director);\n");
        builder.append(" }\n\n");
        builder.append(" public Collection<SampleResult> getObjectCreationSamplingResults() {\n");
        builder.append(" return objectCreationSampler.getSampleResults();\n");
        builder.append(" }\n\n");
        builder.append("}\n");
        return builder.toString();
    }

    // Maps a schema to the "Object"/"List"/"Set"/"Map" infix used to name the matching
    // Hollow*TypeDataAccess / Hollow*MissingDataAccess classes in generated code.
    private String schemaType(HollowSchema schema) {
        switch(schema.getSchemaType()) {
            case OBJECT:
                return "Object";
            case LIST:
                return "List";
            case SET:
                return "Set";
            case MAP:
                return "Map";
            default:
                throw new IllegalArgumentException();
        }
    }
}
| 9,304 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen/AbstractHollowAPIGeneratorBuilder.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.hollow.api.codegen;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.write.HollowWriteStateEngine;
import com.netflix.hollow.core.write.objectmapper.HollowObjectMapper;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.Set;
/**
* Abstract Hollow API Generator Builder - to be extended to create customized Builders
*
* @author dsu
*/
public abstract class AbstractHollowAPIGeneratorBuilder<B extends AbstractHollowAPIGeneratorBuilder<?, ?>, G extends HollowAPIGenerator> {

    // Required settings; build() refuses to run until all three are supplied.
    protected String apiClassname;
    protected String packageName;
    protected HollowDataset dataset;

    // Optional settings with safe defaults.
    protected Set<String> parameterizedTypes = Collections.emptySet();
    protected boolean parameterizeAllClassNames = false;
    protected boolean useErgonomicShortcuts = false;
    protected Path destinationPath;
    protected CodeGeneratorConfig config = new CodeGeneratorConfig();

    /** Creates the concrete generator once all settings have been validated. */
    protected abstract G instantiateGenerator();

    /** Returns {@code this} typed as the concrete builder, enabling fluent chaining. */
    protected abstract B getBuilder();

    /** Sets the class name of the generated API. Required. */
    public B withAPIClassname(String apiClassname) {
        this.apiClassname = apiClassname;
        return getBuilder();
    }

    /** Sets the package into which code is generated. Required. */
    public B withPackageName(String packageName) {
        this.packageName = packageName;
        return getBuilder();
    }

    /** Sets the data model to generate code for. Required. */
    public B withDataModel(HollowDataset dataset) {
        this.dataset = dataset;
        return getBuilder();
    }

    /**
     * Derives the data model from the given POJO classes by initializing each one's
     * type state in a throwaway write state engine.
     */
    public B withDataModel(Class<?> ... classes) {
        HollowWriteStateEngine engine = new HollowWriteStateEngine();
        HollowObjectMapper objectMapper = new HollowObjectMapper(engine);
        for (Class<?> type : classes) {
            objectMapper.initializeTypeState(type);
        }
        return withDataModel(engine);
    }

    /** Restricts generic (parameterized) accessors to the given set of type names. */
    public B withParameterizedTypes(Set<String> parameterizedTypes) {
        this.parameterizedTypes = parameterizedTypes;
        return getBuilder();
    }

    /** When true, all generated accessors use generic type parameters. */
    public B withParameterizeAllClassNames(boolean parameterizeAllClassNames) {
        this.parameterizeAllClassNames = parameterizeAllClassNames;
        return getBuilder();
    }

    /** Enables ergonomic shortcut accessors in the generated API. */
    public B withErgonomicShortcuts() {
        this.useErgonomicShortcuts = true;
        return getBuilder();
    }

    /** Sets the output directory for generated sources. */
    public B withDestination(String destinationPath) {
        return withDestination(Paths.get(destinationPath));
    }

    /** Sets the output directory for generated sources. */
    public B withDestination(Path destinationPath) {
        this.destinationPath = destinationPath;
        return getBuilder();
    }

    // ---- settings delegated to the shared CodeGeneratorConfig ----

    /** Appends the given postfix to every generated class name. */
    public B withClassPostfix(String classPostfix) {
        config.setClassPostfix(classPostfix);
        return getBuilder();
    }

    /** Prepends the given prefix to every generated getter name. */
    public B withGetterPrefix(String getterPrefix) {
        config.setGetterPrefix(getterPrefix);
        return getBuilder();
    }

    /** Controls aggressive substitution of invalid characters in generated names. */
    public B withAggressiveSubstitutions(boolean useAggressiveSubstitutions) {
        config.setUseAggressiveSubstitutions(useAggressiveSubstitutions);
        return getBuilder();
    }

    /** Groups generated classes into sub-packages by kind. */
    public B withPackageGrouping() {
        config.setUsePackageGrouping(true);
        return getBuilder();
    }

    /** Controls boolean-field ergonomics (e.g. is/has-style getters). */
    public B withBooleanFieldErgonomics(boolean useBooleanFieldErgonomics) {
        config.setUseBooleanFieldErgonomics(useBooleanFieldErgonomics);
        return getBuilder();
    }

    /** Reserves a primary-key index for any type declaring a primary key. */
    public B reservePrimaryKeyIndexForTypeWithPrimaryKey(boolean reservePrimaryKeyIndexForTypeWithPrimaryKey) {
        config.setReservePrimaryKeyIndexForTypeWithPrimaryKey(reservePrimaryKeyIndexForTypeWithPrimaryKey);
        return getBuilder();
    }

    /**
     * NOTE: Have to be enabled with withErgonomicShortcuts
     * @return this builder
     */
    public B withRestrictApiToFieldType() {
        config.setRestrictApiToFieldType(true);
        return getBuilder();
    }

    /** Controls use of Hollow primitive-type retrievers in the generated API. */
    public B withHollowPrimitiveTypes(boolean useHollowPrimitiveTypes) {
        config.setUseHollowPrimitiveTypes(useHollowPrimitiveTypes);
        return getBuilder();
    }

    /** Controls verbose toString() generation. */
    public B withVerboseToString(boolean useVerboseToString) {
        config.setUseVerboseToString(useVerboseToString);
        return getBuilder();
    }

    /**
     * Validates that all required settings are present and mutually consistent, then
     * instantiates the generator and hands it the accumulated configuration.
     *
     * @throws IllegalStateException if a required setting is missing or inconsistent
     */
    public G build() {
        requireState(apiClassname != null, "Please specify an API classname (.withAPIClassname()) before calling .build()");
        requireState(packageName != null, "Please specify a package name (.withPackageName()) before calling .build()");
        requireState(dataset != null, "Please specify a data model (.withDataModel()) before calling .build()");
        requireState(!config.isRestrictApiToFieldType() || useErgonomicShortcuts, " restrictApiToFieldType requires withErgonomicShortcuts");
        G generator = instantiateGenerator();
        generator.setCodeGeneratorConfig(config);
        return generator;
    }

    // Guard helper: throws IllegalStateException with the given message when the
    // condition does not hold.
    private static void requireState(boolean condition, String message) {
        if (!condition) {
            throw new IllegalStateException(message);
        }
    }
}
| 9,305 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen/HollowPOJOClassGenerator.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.getJavaScalarType;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.lowercase;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.substituteInvalidChars;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.uppercase;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.index.key.PrimaryKey;
import com.netflix.hollow.core.schema.HollowListSchema;
import com.netflix.hollow.core.schema.HollowMapSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.schema.HollowSetSchema;
import com.netflix.hollow.core.write.objectmapper.HollowInline;
import com.netflix.hollow.core.write.objectmapper.HollowPrimaryKey;
import com.netflix.hollow.core.write.objectmapper.HollowTypeName;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
/**
* This class contains template logic for generating POJOs. Not intended for external consumption.
*
* @see HollowPOJOGenerator
*/
public class HollowPOJOClassGenerator implements HollowJavaFileGenerator {
private final HollowDataset dataset;
private final HollowObjectSchema schema;
private final String className;
private final String classNameSuffix;
private final String packageName;
private final boolean memoizeOrdinal;
private final Set<Class<?>> importClasses;
public HollowPOJOClassGenerator(HollowDataset dataset, HollowObjectSchema schema,
String packageName, String classNameSuffix) {
this(dataset, schema, packageName, classNameSuffix, false);
}
public HollowPOJOClassGenerator(HollowDataset dataset, HollowObjectSchema schema,
String packageName, String classNameSuffix, boolean memoizeOrdinal) {
this.dataset = dataset;
this.schema = schema;
this.packageName = packageName;
this.classNameSuffix = classNameSuffix;
this.className = buildClassName(schema.getName(), classNameSuffix);
this.importClasses = new HashSet<Class<?>>();
this.memoizeOrdinal = memoizeOrdinal;
}
private static String buildClassName(String name, String suffix) {
if (suffix == null) return name;
return name + suffix;
}
@Override
public String getClassName() {
return className;
}
@Override
public String generate() {
StringBuilder classBodyBuilder = new StringBuilder();
importClasses.add(HollowTypeName.class);
classBodyBuilder.append("@SuppressWarnings(\"all\")\n");
classBodyBuilder.append("@HollowTypeName(name=\"").append(schema.getName()).append("\")\n");
generateHollowPrimaryKeyAnnotation(classBodyBuilder);
classBodyBuilder.append("public class ").append(getClassName()).append(" implements Cloneable {\n");
generateInstanceVariables(classBodyBuilder);
classBodyBuilder.append("\n");
generateConstructorForPrimaryKey(classBodyBuilder);
generateChainableSetters(classBodyBuilder);
generateChainableAddForSetAndList(classBodyBuilder);
generateEqualsMethod(classBodyBuilder);
generateHashCodeMethod(classBodyBuilder);
generateToStringMethod(classBodyBuilder);
generateCloneMethod(classBodyBuilder);
classBodyBuilder.append(" }\n\n");
if (memoizeOrdinal) {
classBodyBuilder.append(" private long __assigned_ordinal = -1;\n");
}
classBodyBuilder.append("}");
StringBuilder builder = new StringBuilder();
builder.append("package ").append(packageName).append(";\n\n");
List<String> importClassNames = new ArrayList<String>();
for (Class<?> c : importClasses) {
importClassNames.add(c.getName());
}
Collections.sort(importClassNames);
for (String className : importClassNames) {
builder.append("import ").append(className).append(";\n");
}
builder.append("\n").append(classBodyBuilder);
return builder.toString();
}
private void generateHollowPrimaryKeyAnnotation(StringBuilder classBodyBuilder) {
PrimaryKey primaryKey = schema.getPrimaryKey();
if (primaryKey == null) {
return;
}
importClasses.add(HollowPrimaryKey.class);
classBodyBuilder.append("@HollowPrimaryKey(fields={");
for (int i = 0; i < primaryKey.numFields(); i++) {
if (i > 0) {
classBodyBuilder.append(", ");
}
classBodyBuilder.append("\"").append(primaryKey.getFieldPath(i)).append("\"");
}
classBodyBuilder.append("})\n");
}
private void generateInstanceVariables(StringBuilder classBodyBuilder) {
for (int i = 0;i < schema.numFields();i++) {
if (fieldNeedsTypeNameAnnotation(i)) {
classBodyBuilder.append(" @HollowTypeName(name=\"").append(schema.getReferencedType(i)).append("\")\n");
}
if (fieldNeedsInlineAnnotation(i)) {
importClasses.add(HollowInline.class);
classBodyBuilder.append(" @HollowInline\n");
}
classBodyBuilder.append(" public ");
classBodyBuilder.append(fieldType(i));
classBodyBuilder.append(" ").append(getFieldName(i)).append(" = ").append(defaultValue(i)).append(";\n");
}
}
private void generateConstructorForPrimaryKey(StringBuilder classBodyBuilder) {
PrimaryKey primaryKey = schema.getPrimaryKey();
if (primaryKey == null) {
return;
}
// don't allow no-arg constructors if we have a primary key
classBodyBuilder.append(" private ").append(getClassName()).append("() {}\n\n");
classBodyBuilder.append(" public ").append(getClassName()).append("(");
// classBodyBuilder.append(" this.").append(.fieldType
for (int i = 0; i < primaryKey.numFields(); i++) {
if (i > 0) {
classBodyBuilder.append(", ");
}
int fieldIndex = getIndexFromFieldName(primaryKey.getFieldPath(i));
classBodyBuilder.append(fieldType(fieldIndex)).append(" ").append(getFieldName(fieldIndex));
}
classBodyBuilder.append(") {\n");
for (int i = 0; i < primaryKey.numFields(); i++) {
int fieldIndex = getIndexFromFieldName(primaryKey.getFieldPath(i));
classBodyBuilder.append(" this.").append(getFieldName(fieldIndex)).append(" = ")
.append(getFieldName(fieldIndex)).append(";\n");
}
classBodyBuilder.append(" }\n\n");
}
private void generateChainableSetters(StringBuilder classBodyBuilder) {
for (int i = 0; i < schema.numFields(); i++) {
classBodyBuilder.append(" public ").append(getClassName()).append(" set")
.append(uppercase(getFieldName(i))).append("(")
.append(fieldType(i)).append(" ").append(getFieldName(i)).append(") {\n");
classBodyBuilder.append(" this.").append(getFieldName(i)).append(" = ")
.append(getFieldName(i)).append(";\n");
classBodyBuilder.append(" return this;\n");
classBodyBuilder.append(" }\n");
}
}
private void generateChainableAddForSetAndList(StringBuilder classBodyBuilder) {
for (int i = 0; i < schema.numFields(); i++) {
if (schema.getFieldType(i) != FieldType.REFERENCE) {
continue;
}
HollowSchema referencedSchema = dataset.getSchema(schema.getReferencedType(i));
if (referencedSchema instanceof HollowListSchema || referencedSchema instanceof HollowSetSchema) {
HollowSchema elementSchema = dataset.getSchema(referencedSchema instanceof HollowListSchema
? ((HollowListSchema) referencedSchema).getElementType()
: ((HollowSetSchema) referencedSchema).getElementType());
String elementType = buildFieldType(elementSchema);
Class fieldImplementationType = referencedSchema instanceof HollowListSchema
? ArrayList.class : HashSet.class;
importClasses.add(fieldImplementationType);
classBodyBuilder.append(" public ").append(getClassName()).append(" addTo")
.append(uppercase(getFieldName(i))).append("(")
.append(elementType).append(" ").append(lowercase(elementType)).append(") {\n");
classBodyBuilder.append(" if (this.").append(getFieldName(i)).append(" == null) {\n");
classBodyBuilder.append(" this.").append(getFieldName(i)).append(" = new ")
.append(fieldImplementationType.getSimpleName()).append("<").append(elementType).append(">();\n");
classBodyBuilder.append(" }\n");
classBodyBuilder.append(" this.").append(getFieldName(i)).append(".add(")
.append(lowercase(elementType)).append(");\n");
classBodyBuilder.append(" return this;\n");
classBodyBuilder.append(" }\n");
}
}
}
private void generateEqualsMethod(StringBuilder classBodyBuilder) {
classBodyBuilder.append(" public boolean equals(Object other) {\n");
classBodyBuilder.append(" if (other == this) return true;\n");
classBodyBuilder.append(" if (!(other instanceof ").append(getClassName()).append("))\n");
classBodyBuilder.append(" return false;\n\n");
classBodyBuilder.append(" ").append(getClassName()).append(" o = (").append(getClassName()).append(") other;\n");
for(int i=0;i<schema.numFields();i++) {
switch(schema.getFieldType(i)) {
case BOOLEAN:
case DOUBLE:
case FLOAT:
case INT:
case LONG:
classBodyBuilder.append(" if (o.").append(getFieldName(i)).append(" != ").append(getFieldName(i)).append(") return false;\n");
break;
case BYTES:
case STRING:
importClasses.add(Objects.class);
classBodyBuilder.append(" if (!Objects.equals(o.").append(getFieldName(i)).append(", ")
.append(getFieldName(i)).append(")) return false;\n");
break;
case REFERENCE:
classBodyBuilder.append(" if (o.").append(getFieldName(i)).append(" == null) {\n");
classBodyBuilder.append(" if (").append(getFieldName(i)).append(" != null) return false;\n");
classBodyBuilder.append(" } else if (!o.").append(getFieldName(i)).append(".equals(").append(getFieldName(i)).append(")) return false;\n");
break;
}
}
classBodyBuilder.append(" return true;\n");
classBodyBuilder.append(" }\n\n");
}
private void generateHashCodeMethod(StringBuilder classBodyBuilder) {
classBodyBuilder.append(" public int hashCode() {\n");
classBodyBuilder.append(" int hashCode = 1;\n");
boolean tempExists = false;
for (int i = 0; i < schema.numFields(); i++) {
String fieldName = getFieldName(i);
switch (schema.getFieldType(i)) {
case BOOLEAN:
classBodyBuilder.append(" hashCode = hashCode * 31 + (" + fieldName + "? 1231 : 1237);\n");
break;
case DOUBLE:
if (!tempExists)
classBodyBuilder.append(" long temp;\n");
classBodyBuilder.append(" temp = java.lang.Double.doubleToLongBits(" + fieldName + ")\n");
classBodyBuilder.append(" hashCode = hashCode * 31 + (int) (temp ^ (temp >>> 32));\n");
break;
case FLOAT:
classBodyBuilder.append(" hashCode = hashCode * 31 + java.lang.Float.floatToIntBits(" + fieldName + ");\n");
break;
case INT:
classBodyBuilder.append(" hashCode = hashCode * 31 + " + fieldName + ";\n");
break;
case LONG:
classBodyBuilder.append(" hashCode = hashCode * 31 + (int) (" + fieldName + " ^ ("+ fieldName + " >>> 32));\n");
break;
case BYTES:
case STRING:
importClasses.add(Objects.class);
classBodyBuilder.append(" hashCode = hashCode * 31 + Objects.hash(" + fieldName + ");\n");
break;
case REFERENCE:
importClasses.add(Objects.class);
classBodyBuilder.append(" hashCode = hashCode * 31 + Objects.hash(" + fieldName + ");\n");
break;
}
}
classBodyBuilder.append(" return hashCode;\n");
classBodyBuilder.append(" }\n\n");
}
private void generateToStringMethod(StringBuilder classBodyBuilder) {
classBodyBuilder.append(" public String toString() {\n");
classBodyBuilder.append(" StringBuilder builder = new StringBuilder(\"").append(getClassName()).append("{\");\n");
for (int i=0;i<schema.numFields();i++) {
classBodyBuilder.append(" builder.append(\"");
if (i > 0)
classBodyBuilder.append(",");
classBodyBuilder.append(getFieldName(i)).append("=\").append(").append(getFieldName(i)).append(");\n");
}
classBodyBuilder.append(" builder.append(\"}\");\n");
classBodyBuilder.append(" return builder.toString();\n");
classBodyBuilder.append(" }\n\n");
}
private void generateCloneMethod(StringBuilder classBodyBuilder) {
classBodyBuilder.append(" public ").append(getClassName()).append(" clone() {\n");
classBodyBuilder.append(" try {\n");
classBodyBuilder.append(" ").append(getClassName())
.append(" clone = (" + getClassName() + ") super.clone();\n");
if (memoizeOrdinal) {
classBodyBuilder.append(" clone.__assigned_ordinal = -1;\n");
}
classBodyBuilder.append(" return clone;\n");
classBodyBuilder
.append(" } catch (CloneNotSupportedException cnse) { throw new RuntimeException(cnse); }\n");
}
private boolean fieldNeedsTypeNameAnnotation(int i) {
if (schema.getFieldType(i) == FieldType.REFERENCE) {
HollowSchema referencedSchema = dataset.getSchema(schema.getReferencedType(i));
return !referencedSchema.getName().equals(expectedCollectionClassName(referencedSchema));
}
return false;
}
private boolean fieldNeedsInlineAnnotation(int i) {
return schema.getFieldType(i) == FieldType.STRING;
}
private String fieldType(int i) {
switch (schema.getFieldType(i)) {
case BOOLEAN:
case BYTES:
case DOUBLE:
case FLOAT:
case INT:
case LONG:
case STRING:
return getJavaScalarType(schema.getFieldType(i));
case REFERENCE:
return buildFieldType(dataset.getSchema(schema.getReferencedType(i)));
default:
throw new IllegalArgumentException("Field type is unrecognized: " + schema.getFieldType(i));
}
}
private String defaultValue(int i) {
switch (schema.getFieldType(i)) {
case BOOLEAN:
return "false";
case DOUBLE:
return "java.lang.Double.NaN";
case FLOAT:
return "java.lang.Float.NaN";
case INT:
return "java.lang.Integer.MIN_VALUE";
case LONG:
return "java.lang.Long.MIN_VALUE";
case BYTES:
case STRING:
case REFERENCE:
return "null";
}
throw new IllegalArgumentException("Field type is unrecognized: " + schema.getFieldType(i));
}
private String buildFieldType(HollowSchema referencedSchema) {
if (referencedSchema instanceof HollowObjectSchema) {
return buildClassName(referencedSchema.getName(), classNameSuffix);
} else if (referencedSchema instanceof HollowListSchema) {
importClasses.add(List.class);
HollowSchema elementSchema = dataset.getSchema(((HollowListSchema)referencedSchema).getElementType());
return "List<" + buildFieldType(elementSchema) + ">";
} else if (referencedSchema instanceof HollowSetSchema) {
importClasses.add(Set.class);
HollowSchema elementSchema = dataset.getSchema(((HollowSetSchema)referencedSchema).getElementType());
return "Set<" + buildFieldType(elementSchema) + ">";
} else if (referencedSchema instanceof HollowMapSchema) {
importClasses.add(Map.class);
HollowSchema keySchema = dataset.getSchema(((HollowMapSchema)referencedSchema).getKeyType());
HollowSchema valueSchema = dataset.getSchema(((HollowMapSchema)referencedSchema).getValueType());
return "Map<" + buildFieldType(keySchema) + ", " + buildFieldType(valueSchema) + ">";
}
throw new IllegalArgumentException("Schema is unrecognized type " + referencedSchema.getClass().getSimpleName());
}
/**
 * Derives the conventional hollow collection type name for a referenced schema
 * (e.g. "ListOfFoo", "MapOfFooToBar"), recursing through nested collections.
 * Object schemas contribute their raw schema name. Collection interfaces used
 * here are recorded in {@code importClasses} as a side effect.
 */
private String expectedCollectionClassName(HollowSchema referencedSchema) {
    if (referencedSchema instanceof HollowObjectSchema)
        return referencedSchema.getName();

    if (referencedSchema instanceof HollowListSchema) {
        importClasses.add(List.class);
        HollowSchema element = dataset.getSchema(((HollowListSchema) referencedSchema).getElementType());
        return "ListOf" + expectedCollectionClassName(element);
    }

    if (referencedSchema instanceof HollowSetSchema) {
        importClasses.add(Set.class);
        HollowSchema element = dataset.getSchema(((HollowSetSchema) referencedSchema).getElementType());
        return "SetOf" + expectedCollectionClassName(element);
    }

    if (referencedSchema instanceof HollowMapSchema) {
        importClasses.add(Map.class);
        HollowSchema key = dataset.getSchema(((HollowMapSchema) referencedSchema).getKeyType());
        HollowSchema value = dataset.getSchema(((HollowMapSchema) referencedSchema).getValueType());
        return "MapOf" + expectedCollectionClassName(key) + "To" + expectedCollectionClassName(value);
    }

    throw new IllegalArgumentException("Expected HollowCollectionSchema or HollowMapSchema but got " + referencedSchema.getClass().getSimpleName());
}
/**
 * Returns a field name that is safe for use as a java variable
 * (invalid identifier characters are substituted).
 *
 * @param index the field position within the schema
 * @return the sanitized field name
 */
private String getFieldName(int index) {
    return substituteInvalidChars(schema.getFieldName(index));
}
/**
 * Looks up the schema position of a (sanitized) field name.
 *
 * @param fieldName the sanitized name to search for
 * @return the field's index, or -1 when no field matches
 */
private int getIndexFromFieldName(String fieldName) {
    int numFields = schema.numFields();
    for (int idx = 0; idx < numFields; idx++) {
        // Compare against the sanitized form so lookups match getFieldName output.
        if (getFieldName(idx).equals(fieldName))
            return idx;
    }
    return -1;
}
}
/*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen.testdata;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.schema.HollowListSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.schema.HollowSchema.SchemaType;
/**
 * Generates the source of a {@code <listType>TestData} class from a
 * {@link HollowListSchema}: a fluent test-data record to which elements are
 * added one at a time, plus a one-argument scalar shortcut setter when the
 * element schema is an object with a single non-REFERENCE field.
 */
class HollowListTypeTestDataAPIClassGenerator {

    private final HollowDataset dataset;
    private final HollowListSchema schema;
    private final String packageName;
    private final String className;
    private final String elementClassName;

    public HollowListTypeTestDataAPIClassGenerator(HollowDataset dataset, HollowListSchema schema, String packageName) {
        this.dataset = dataset;
        this.schema = schema;
        this.packageName = packageName;
        this.className = schema.getName() + "TestData";
        this.elementClassName = schema.getElementType() + "TestData";
    }

    /** Returns the complete java source of the generated class. */
    public String generate() {
        StringBuilder builder = new StringBuilder();

        builder.append("package " + packageName + ";\n\n");
        builder.append("import com.netflix.hollow.api.testdata.HollowTestListRecord;\n" +
                "import com.netflix.hollow.core.schema.HollowListSchema;\n\n");

        builder.append("public class " + className + "<T> extends HollowTestListRecord<T> {\n\n");
        builder.append(" " + className + "(T parent) {\n");
        builder.append(" super(parent);\n");
        builder.append(" }\n\n");

        // Accessor which creates, registers, and returns a new element sub-record.
        String elementReturnType = elementClassName + "<" + className + "<T>>";
        builder.append(" public " + elementReturnType + " " + schema.getElementType() + "() {\n");
        builder.append(" " + elementReturnType + " __e = new " + elementReturnType + "(this);\n");
        builder.append(" super.addElement(__e);\n");
        builder.append(" return __e;\n");
        builder.append(" }\n\n");

        // Ergonomic shortcut: when the element type wraps a single scalar field,
        // also emit a one-argument setter so callers avoid the sub-record dance.
        HollowSchema elementSchema = dataset.getSchema(schema.getElementType());
        String shortcutType = ergonomicShortcutType(elementSchema);
        if(shortcutType != null) {
            String elementField = ((HollowObjectSchema) elementSchema).getFieldName(0);
            builder.append(" public " + className + "<T> " + schema.getElementType() + "(" + shortcutType + " value) {\n");
            builder.append(" " + schema.getElementType() + "()." + elementField + "(value);\n");
            builder.append(" return this;\n");
            builder.append(" }\n\n");
        }

        builder.append(" private static final HollowListSchema SCHEMA = new HollowListSchema(\"").append(schema.getName()).append("\", \"").append(schema.getElementType()).append("\");\n\n");
        builder.append(" @Override public HollowListSchema getSchema() { return SCHEMA; }\n\n");
        builder.append("}");

        return builder.toString();
    }

    /**
     * Boxed java parameter type for the ergonomic shortcut setter, or
     * {@code null} when the element schema does not qualify (not an object
     * schema, more than one field, or a REFERENCE field). Replaces seven
     * near-identical switch branches from the previous version.
     */
    private static String ergonomicShortcutType(HollowSchema elementSchema) {
        if(elementSchema.getSchemaType() != SchemaType.OBJECT)
            return null;
        HollowObjectSchema objSchema = (HollowObjectSchema) elementSchema;
        if(objSchema.numFields() != 1)
            return null;
        switch(objSchema.getFieldType(0)) {
            case INT:     return "Integer";
            case LONG:    return "Long";
            case FLOAT:   return "Float";
            case DOUBLE:  return "Double";
            case BOOLEAN: return "Boolean";
            case BYTES:   return "byte[]";
            case STRING:  return "String";
            default:      return null; // REFERENCE: no shortcut possible
        }
    }
}
/*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen.testdata;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.schema.HollowSchema.SchemaType;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
 * Generates the source of a {@code <type>TestData} class from a
 * {@link HollowObjectSchema}: a fluent test-data record exposing one setter
 * per schema field (scalar setters for primitive fields, sub-record accessors
 * for REFERENCE fields, plus ergonomic scalar shortcuts where the referenced
 * type wraps a single scalar field), and the SCHEMA constant used at runtime.
 *
 * NOTE: the previous version declared a {@code fieldTypesToImport} set that was
 * never populated, plus import-emission code guarded on it being non-empty;
 * that dead code has been removed — generated output is unchanged.
 */
class HollowObjectTypeTestDataAPIClassGenerator {

    private final HollowDataset dataset;
    private final HollowObjectSchema schema;
    private final String packageName;
    private final String className;

    public HollowObjectTypeTestDataAPIClassGenerator(HollowDataset dataset, HollowObjectSchema schema, String packageName) {
        this.dataset = dataset;
        this.schema = schema;
        this.packageName = packageName;
        this.className = schema.getName() + "TestData";
    }

    /** Returns the complete java source of the generated class. */
    public String generate() {
        StringBuilder importBuilder = new StringBuilder();
        importBuilder.append("package " + packageName + ";\n\n");
        importBuilder.append("import com.netflix.hollow.api.testdata.HollowTestObjectRecord;\n");
        if(schema.getPrimaryKey() != null)
            importBuilder.append("import com.netflix.hollow.core.index.key.PrimaryKey;\n");
        importBuilder.append("import com.netflix.hollow.core.schema.HollowObjectSchema;\n");
        importBuilder.append("import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;\n\n");

        StringBuilder builder = new StringBuilder();
        builder.append("public class " + className + "<T> extends HollowTestObjectRecord<T> {\n\n");
        builder.append(" " + className + "(T parent){\n");
        builder.append(" super(parent);\n");
        builder.append(" }\n\n");

        for(int i=0;i<schema.numFields();i++) {
            String fieldName = schema.getFieldName(i);
            if(schema.getFieldType(i) == FieldType.REFERENCE) {
                appendReferenceAccessors(builder, i, fieldName);
            } else {
                // Scalar fields get a single boxed-type setter recording the value directly.
                String paramType = scalarParamType(schema.getFieldType(i));
                if(paramType != null)
                    appendSetter(builder, paramType, fieldName,
                            " super.addField(\"" + fieldName + "\", " + fieldName + ");\n");
            }
        }

        appendSchemaConstant(builder);

        builder.append(" @Override public HollowObjectSchema getSchema() { return SCHEMA; }\n\n");
        builder.append("}");

        return importBuilder.toString() + builder.toString();
    }

    /** Emits the sub-record accessor for a REFERENCE field, plus an ergonomic scalar setter when possible. */
    private void appendReferenceAccessors(StringBuilder builder, int fieldIdx, String fieldName) {
        String refType = schema.getReferencedType(fieldIdx);
        String returnType = className(refType) + "<" + className + "<T>>";
        builder.append(" public " + returnType + " " + fieldName + "() {\n");
        builder.append(" " + returnType + " __x = new " + returnType + "(this);\n");
        builder.append(" super.addField(\"" + fieldName + "\", __x);\n");
        builder.append(" return __x;\n");
        builder.append(" }\n\n");

        if(canErgonomicShortcut(fieldIdx)) {
            HollowObjectSchema refSchema = (HollowObjectSchema)dataset.getSchema(refType);
            String refField = refSchema.getFieldName(0);
            String paramType = scalarParamType(refSchema.getFieldType(0));
            if(paramType == null)
                throw new IllegalStateException("Cannot actually ergonomic shortcut");
            appendSetter(builder, paramType, fieldName,
                    " " + fieldName + "()." + refField + "(" + fieldName + ");\n");
        }
    }

    /** Emits a fluent setter named {@code fieldName} taking {@code paramType}, with the supplied body line. */
    private void appendSetter(StringBuilder builder, String paramType, String fieldName, String bodyLine) {
        builder.append(" public " + className + "<T> " + fieldName + "(" + paramType + " " + fieldName + ") {\n");
        builder.append(bodyLine);
        builder.append(" return this;\n");
        builder.append(" }\n\n");
    }

    /** Emits the static SCHEMA constant (with primary key, if declared) and its field-registration block. */
    private void appendSchemaConstant(StringBuilder builder) {
        builder.append(" public static final HollowObjectSchema SCHEMA = new HollowObjectSchema(\"" + schema.getName() + "\", " + schema.numFields());
        if(schema.getPrimaryKey() != null) {
            builder.append(", new PrimaryKey(\"" + schema.getName() + "\"");
            for(int i=0;i<schema.getPrimaryKey().numFields();i++) {
                builder.append(", \"" + schema.getPrimaryKey().getFieldPath(i) + "\"");
            }
            builder.append(")");
        }
        builder.append(");\n\n");

        builder.append(" static {\n");
        for(int i=0;i<schema.numFields();i++) {
            builder.append(" SCHEMA.addField(\"" + schema.getFieldName(i) + "\", FieldType." + schema.getFieldType(i).name());
            if(schema.getFieldType(i) == FieldType.REFERENCE)
                builder.append(", \"" + schema.getReferencedType(i) + "\"");
            builder.append(");\n");
        }
        builder.append(" }\n\n");
    }

    /**
     * Boxed java parameter type for a scalar field, or {@code null} for
     * REFERENCE (which has no scalar parameter form). Replaces the two large
     * duplicated switches from the previous version.
     */
    private static String scalarParamType(FieldType fieldType) {
        switch(fieldType) {
            case INT:     return "Integer";
            case LONG:    return "Long";
            case FLOAT:   return "Float";
            case DOUBLE:  return "Double";
            case BOOLEAN: return "Boolean";
            case BYTES:   return "byte[]";
            case STRING:  return "String";
            default:      return null;
        }
    }

    /** Generated test-data class name for a referenced hollow type. */
    public String className(String type) {
        return type + "TestData";
    }

    /** True when field {@code fieldIdx} is a REFERENCE whose target supports a scalar shortcut. */
    public boolean canErgonomicShortcut(int fieldIdx) {
        if(schema.getFieldType(fieldIdx) != FieldType.REFERENCE)
            return false;
        String refType = schema.getReferencedType(fieldIdx);
        HollowSchema refSchema = dataset.getSchema(refType);
        return canErgonomicShortcut(refSchema);
    }

    /** True when the schema is an object schema with exactly one non-REFERENCE field. */
    public boolean canErgonomicShortcut(HollowSchema schema) {
        if(schema.getSchemaType() != SchemaType.OBJECT)
            return false;
        HollowObjectSchema objSchema = (HollowObjectSchema)schema;
        if(objSchema.numFields() != 1)
            return false;
        return objSchema.getFieldType(0) != FieldType.REFERENCE;
    }
}
/*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen.testdata;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.schema.HollowListSchema;
import com.netflix.hollow.core.schema.HollowMapSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.schema.HollowSetSchema;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
/**
 * Entry point for generating a hollow test-data API: writes the dataset-level
 * API class plus one {@code <type>TestData} class per schema into a
 * destination directory. Configure and obtain an instance via {@link #newBuilder()}.
 */
public class HollowTestDataAPIGenerator {

    private HollowDataset dataset;
    private String apiClassname;
    private String packageName;
    private Path destinationPath;

    /** Creates a builder bound to a fresh generator instance. */
    public static Builder newBuilder() {
        return new HollowTestDataAPIGenerator().theBuilder();
    }

    private Builder theBuilder() {
        return new Builder();
    }

    /** Fluent configuration for the enclosing generator instance. */
    public class Builder {
        public Builder withDataset(HollowDataset dataset) {
            HollowTestDataAPIGenerator.this.dataset = dataset;
            return this;
        }

        public Builder withAPIClassname(String apiClassname) {
            HollowTestDataAPIGenerator.this.apiClassname = apiClassname;
            return this;
        }

        public Builder withPackageName(String packageName) {
            HollowTestDataAPIGenerator.this.packageName = packageName;
            return this;
        }

        public Builder withDestination(String destinationPath) {
            return withDestination(Paths.get(destinationPath));
        }

        public Builder withDestination(Path destinationPath) {
            HollowTestDataAPIGenerator.this.destinationPath = destinationPath;
            return this;
        }

        public HollowTestDataAPIGenerator build() {
            return HollowTestDataAPIGenerator.this;
        }
    }

    /**
     * Generates all source files into the configured destination.
     *
     * @throws IOException if the destination cannot be created or written
     */
    public void generateSourceFiles() throws IOException {
        generate(dataset, packageName, apiClassname, destinationPath);
    }

    private void generate(HollowDataset dataset, String packageName, String apiClassName, Path destination) throws IOException {
        Files.createDirectories(destination);

        String apiSource = new HollowTestDataAPIClassGenerator(dataset, apiClassName, packageName).generate();
        writeSource(destination.resolve(apiClassName + ".java").toFile(), apiSource);

        for(HollowSchema schema : dataset.getSchemas()) {
            String typeSource = typeSourceFor(dataset, schema, packageName);
            if(typeSource != null)
                writeSource(destination.resolve(schema.getName() + "TestData.java").toFile(), typeSource);
        }
    }

    /** Returns the generated source for one schema, or null for schema types with no generator. */
    private String typeSourceFor(HollowDataset dataset, HollowSchema schema, String packageName) {
        switch(schema.getSchemaType()) {
            case OBJECT:
                return new HollowObjectTypeTestDataAPIClassGenerator(dataset, (HollowObjectSchema) schema, packageName).generate();
            case LIST:
                return new HollowListTypeTestDataAPIClassGenerator(dataset, (HollowListSchema) schema, packageName).generate();
            case SET:
                return new HollowSetTypeTestDataAPIClassGenerator(dataset, (HollowSetSchema) schema, packageName).generate();
            case MAP:
                return new HollowMapTypeTestDataAPIClassGenerator(dataset, (HollowMapSchema) schema, packageName).generate();
            default:
                return null;
        }
    }

    // Writes with the platform default charset — matches the historical FileWriter behavior.
    private static void writeSource(File file, String content) throws IOException {
        try (FileWriter writer = new FileWriter(file)) {
            writer.write(content);
        }
    }
}
/*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen.testdata;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.schema.HollowMapSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.schema.HollowSchema.SchemaType;
/**
 * Generates the source of a {@code <mapType>TestData} class from a
 * {@link HollowMapSchema}: a fluent test-data record whose entries are added
 * one at a time, each entry exposing key() and value() sub-records, with
 * scalar shortcut setters when the key/value schema is an object wrapping a
 * single non-REFERENCE field.
 */
class HollowMapTypeTestDataAPIClassGenerator {

    private final HollowDataset dataset;
    private final HollowMapSchema schema;
    private final String packageName;
    // Generated class names derived from the schema's type names.
    private final String className;
    private final String keyClassName;
    private final String valueClassName;

    public HollowMapTypeTestDataAPIClassGenerator(HollowDataset dataset, HollowMapSchema schema, String packageName) {
        this.dataset = dataset;
        this.schema = schema;
        this.packageName = packageName;
        this.className = schema.getName() + "TestData";
        this.keyClassName = schema.getKeyType() + "TestData";
        this.valueClassName = schema.getValueType() + "TestData";
    }

    /** Returns the complete java source of the generated class. */
    public String generate() {
        StringBuilder builder = new StringBuilder();
        builder.append("package " + packageName + ";\n\n");
        builder.append("import com.netflix.hollow.api.testdata.HollowTestMapRecord;\n" +
        "import com.netflix.hollow.core.schema.HollowMapSchema;\n\n");
        builder.append("public class " + className + "<T> extends HollowTestMapRecord<T> {\n\n");
        builder.append(" " + className + "(T parent) {\n");
        builder.append(" super(parent);\n");
        builder.append(" }\n\n");
        // Accessor which creates, registers, and returns a new map entry.
        builder.append(" public Entry entry() {\n");
        builder.append(" Entry e = new Entry();\n");
        builder.append(" addEntry(e);\n");
        builder.append(" return e;\n");
        builder.append(" }\n\n");
        // Ergonomic shortcut: only emitted when BOTH key and value schemas
        // support a scalar form, producing entry(K key, V value).
        if(canErgonomicShortcut(schema.getKeyType()) && canErgonomicShortcut(schema.getValueType())) {
            String keyType = getErgonomicShortcutType(schema.getKeyType());
            String valueType = getErgonomicShortcutType(schema.getValueType());
            builder.append(" public " + className + "<T> entry(" + keyType + " key, " + valueType + " value) {\n");
            builder.append(" entry().key(key).value(value);\n");
            builder.append(" return this;\n");
            builder.append(" }\n\n");
        } else if(canErgonomicShortcut(schema.getKeyType())) {
            // TODO: a key-only shortcut (e.g. entry(K key) returning the value
            // sub-record) has never been implemented; this branch is a no-op.
        }
        builder.append(" private static final HollowMapSchema SCHEMA = new HollowMapSchema(\"" + schema.getName() + "\", \"" + schema.getKeyType() + "\", \"" + schema.getValueType() + "\"");
        // Hash-key field paths, if declared, become trailing constructor args.
        if(schema.getHashKey() != null) {
            for(String fieldPath : schema.getHashKey().getFieldPaths()) {
                builder.append(", \"" + fieldPath + "\"");
            }
        }
        builder.append(");\n\n");
        builder.append(" @Override public HollowMapSchema getSchema() { return SCHEMA; }\n\n");
        // Inner Entry class: pairs one key sub-record with one value sub-record.
        builder.append(" public class Entry extends HollowTestMapRecord.Entry<" + className + "<T>> {\n\n");
        builder.append(" public Entry() {\n");
        builder.append(" super(" + className + ".this);\n");
        builder.append(" }\n\n");
        builder.append(" public " + keyClassName + "<Entry> key() {\n");
        builder.append(" " + keyClassName + "<Entry> __k = new " + keyClassName + "<>(this);\n");
        builder.append(" setKey(__k);\n");
        builder.append(" return __k;\n");
        builder.append(" }\n\n");
        builder.append(" public " + valueClassName + "<Entry> value() {\n");
        builder.append(" " + valueClassName + "<Entry> __v = new " + valueClassName + "<>(this);\n");
        builder.append(" setValue(__v);\n");
        builder.append(" return __v;\n");
        builder.append(" }\n\n");
        // Per-side scalar shortcuts on Entry (emitted independently of each other).
        if(canErgonomicShortcut(schema.getKeyType())) {
            String keyType = getErgonomicShortcutType(schema.getKeyType());
            String keyFieldName = getErgonomicFieldName(schema.getKeyType());
            builder.append(" public Entry key(" + keyType + " key) {\n");
            builder.append(" key()." + keyFieldName + "(key);\n");
            builder.append(" return this;\n");
            builder.append(" }\n\n");
        }
        if(canErgonomicShortcut(schema.getValueType())) {
            String valueType = getErgonomicShortcutType(schema.getValueType());
            String valueFieldName = getErgonomicFieldName(schema.getValueType());
            builder.append(" public Entry value(" + valueType + " value) {\n");
            builder.append(" value()." + valueFieldName + "(value);\n");
            builder.append(" return this;\n");
            builder.append(" }\n\n");
        }
        builder.append(" }\n\n");
        builder.append("}");
        return builder.toString();
    }

    // NOTE(review): appears unused within this class; kept for API compatibility.
    public String className(String type) {
        return type + "TestData";
    }

    /** True when the named schema supports a scalar shortcut setter. */
    private boolean canErgonomicShortcut(String schemaName) {
        return canErgonomicShortcut(dataset.getSchema(schemaName));
    }

    /** True when the schema is an object schema with exactly one non-REFERENCE field. */
    private boolean canErgonomicShortcut(HollowSchema schema) {
        if(schema.getSchemaType() != SchemaType.OBJECT)
            return false;
        HollowObjectSchema objSchema = (HollowObjectSchema)schema;
        if(objSchema.numFields() != 1)
            return false;
        return objSchema.getFieldType(0) != FieldType.REFERENCE;
    }

    /**
     * Boxed java parameter type for the named schema's single scalar field.
     * Callers must first check {@link #canErgonomicShortcut(String)}.
     */
    private String getErgonomicShortcutType(String schemaName) {
        HollowObjectSchema schema = (HollowObjectSchema)dataset.getSchema(schemaName);
        switch(schema.getFieldType(0)) {
            case INT:
                return "Integer";
            case LONG:
                return "Long";
            case FLOAT:
                return "Float";
            case DOUBLE:
                return "Double";
            case BOOLEAN:
                return "Boolean";
            case BYTES:
                return "byte[]";
            case STRING:
                return "String";
            default:
                // REFERENCE is excluded by canErgonomicShortcut.
                throw new IllegalArgumentException();
        }
    }

    /** Name of the named schema's single field (the shortcut setter's target). */
    private String getErgonomicFieldName(String schemaName) {
        HollowObjectSchema schema = (HollowObjectSchema)dataset.getSchema(schemaName);
        return schema.getFieldName(0);
    }
}
/*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen.testdata;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.schema.HollowSchema.SchemaType;
import com.netflix.hollow.core.schema.HollowSetSchema;
/**
 * Generates the source of a {@code <setType>TestData} class from a
 * {@link HollowSetSchema}: a fluent test-data record to which elements are
 * added one at a time, plus a one-argument scalar shortcut setter when the
 * element schema is an object with a single non-REFERENCE field.
 */
class HollowSetTypeTestDataAPIClassGenerator {

    private final HollowDataset dataset;
    private final HollowSetSchema schema;
    private final String packageName;
    private final String className;
    private final String elementClassName;

    public HollowSetTypeTestDataAPIClassGenerator(HollowDataset dataset, HollowSetSchema schema, String packageName) {
        this.dataset = dataset;
        this.schema = schema;
        this.packageName = packageName;
        this.className = schema.getName() + "TestData";
        this.elementClassName = schema.getElementType() + "TestData";
    }

    /** Returns the complete java source of the generated class. */
    public String generate() {
        StringBuilder builder = new StringBuilder();

        builder.append("package " + packageName + ";\n\n");
        builder.append("import com.netflix.hollow.api.testdata.HollowTestSetRecord;\n" +
                "import com.netflix.hollow.core.schema.HollowSetSchema;\n\n");

        builder.append("public class " + className + "<T> extends HollowTestSetRecord<T> {\n\n");
        builder.append(" " + className + "(T parent) {\n");
        builder.append(" super(parent);\n");
        builder.append(" }\n\n");

        // Accessor which creates, registers, and returns a new element sub-record.
        String elementReturnType = elementClassName + "<" + className + "<T>>";
        builder.append(" public " + elementReturnType + " " + schema.getElementType() + "() {\n");
        builder.append(" " + elementReturnType + " __e = new " + elementReturnType + "(this);\n");
        builder.append(" super.addElement(__e);\n");
        builder.append(" return __e;\n");
        builder.append(" }\n\n");

        // Ergonomic shortcut: when the element type wraps a single scalar field,
        // also emit a one-argument setter so callers avoid the sub-record dance.
        HollowSchema elementSchema = dataset.getSchema(schema.getElementType());
        String shortcutType = ergonomicShortcutType(elementSchema);
        if(shortcutType != null) {
            String elementField = ((HollowObjectSchema) elementSchema).getFieldName(0);
            builder.append(" public " + className + "<T> " + schema.getElementType() + "(" + shortcutType + " value) {\n");
            builder.append(" " + schema.getElementType() + "()." + elementField + "(value);\n");
            builder.append(" return this;\n");
            builder.append(" }\n\n");
        }

        builder.append(" private static final HollowSetSchema SCHEMA = new HollowSetSchema(\"").append(schema.getName()).append("\", \"").append(schema.getElementType()).append("\"");
        // Hash-key field paths, if declared, become trailing constructor args.
        if(schema.getHashKey() != null) {
            for(String fieldPath : schema.getHashKey().getFieldPaths()) {
                builder.append(", \"" + fieldPath + "\"");
            }
        }
        builder.append(");\n\n");

        builder.append(" @Override public HollowSetSchema getSchema() { return SCHEMA; }\n\n");
        builder.append("}");

        return builder.toString();
    }

    /**
     * Boxed java parameter type for the ergonomic shortcut setter, or
     * {@code null} when the element schema does not qualify (not an object
     * schema, more than one field, or a REFERENCE field). Replaces seven
     * near-identical switch branches from the previous version.
     */
    private static String ergonomicShortcutType(HollowSchema elementSchema) {
        if(elementSchema.getSchemaType() != SchemaType.OBJECT)
            return null;
        HollowObjectSchema objSchema = (HollowObjectSchema) elementSchema;
        if(objSchema.numFields() != 1)
            return null;
        switch(objSchema.getFieldType(0)) {
            case INT:     return "Integer";
            case LONG:    return "Long";
            case FLOAT:   return "Float";
            case DOUBLE:  return "Double";
            case BOOLEAN: return "Boolean";
            case BYTES:   return "byte[]";
            case STRING:  return "String";
            default:      return null; // REFERENCE: no shortcut possible
        }
    }
}
/*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen.testdata;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.schema.HollowSchema;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
/**
 * Generates the top-level test-data API class: a {@code HollowTestDataset}
 * subclass exposing one factory method per schema in the dataset.
 */
class HollowTestDataAPIClassGenerator {

    private final HollowDataset dataset;
    private final String apiClassName;
    private final String packageName;

    public HollowTestDataAPIClassGenerator(HollowDataset dataset, String apiClassName, String packageName) {
        this.dataset = dataset;
        this.apiClassName = apiClassName;
        this.packageName = packageName;
    }

    /** Returns the complete java source of the API class. */
    public String generate() {
        // Sort by type name so regenerated output is stable across runs.
        List<HollowSchema> sortedSchemas = new ArrayList<>(dataset.getSchemas());
        sortedSchemas.sort(Comparator.comparing(HollowSchema::getName));

        StringBuilder out = new StringBuilder();
        out.append("package " + packageName + ";\n\n");
        out.append("import com.netflix.hollow.api.testdata.HollowTestDataset;\n\n");
        out.append("public class " + apiClassName + " extends HollowTestDataset {\n\n");

        for(HollowSchema schema : sortedSchemas) {
            String typeName = schema.getName();
            out.append(" public " + typeName + "TestData<Void> " + typeName + "() {\n");
            out.append(" " + typeName + "TestData<Void> rec = new " + typeName + "TestData<>(null);\n");
            out.append(" add(rec);\n");
            out.append(" return rec;\n");
            out.append(" }\n\n");
        }

        return out.append("}").toString();
    }
}
| 9,312 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen/objects/HollowFactoryJavaGenerator.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen.objects;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.delegateCachedClassname;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.hollowFactoryClassname;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.typeAPIClassname;
import com.netflix.hollow.api.codegen.CodeGeneratorConfig;
import com.netflix.hollow.api.codegen.HollowAPIGenerator;
import com.netflix.hollow.api.codegen.HollowConsumerJavaFileGenerator;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.api.custom.HollowTypeAPI;
import com.netflix.hollow.api.objects.delegate.HollowListCachedDelegate;
import com.netflix.hollow.api.objects.delegate.HollowMapCachedDelegate;
import com.netflix.hollow.api.objects.delegate.HollowSetCachedDelegate;
import com.netflix.hollow.api.objects.provider.HollowFactory;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.read.dataaccess.HollowTypeDataAccess;
import com.netflix.hollow.core.schema.HollowListSchema;
import com.netflix.hollow.core.schema.HollowMapSchema;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.schema.HollowSetSchema;
import java.util.Arrays;
/**
* This class contains template logic for generating a {@link HollowAPI} implementation. Not intended for external consumption.
*
* @see HollowAPIGenerator
*/
public class HollowFactoryJavaGenerator extends HollowConsumerJavaFileGenerator {

    public static final String SUB_PACKAGE_NAME = "core";

    private final String objectClassName;
    private final HollowSchema schema;

    public HollowFactoryJavaGenerator(String packageName, HollowSchema schema, HollowDataset dataset,
                                      CodeGeneratorConfig config) {
        super(packageName, SUB_PACKAGE_NAME, dataset, config);
        this.schema = schema;
        this.objectClassName = hollowImplClassname(schema.getName());
        this.className = hollowFactoryClassname(schema.getName());
    }

    /** Renders the complete source of the generated {@code HollowFactory} subclass. */
    @Override
    public String generate() {
        StringBuilder src = new StringBuilder();
        appendPackageAndCommonImports(src, null, Arrays.asList(schema));

        src.append("import ").append(HollowFactory.class.getName()).append(";\n")
           .append("import ").append(HollowTypeDataAccess.class.getName()).append(";\n")
           .append("import ").append(HollowTypeAPI.class.getName()).append(";\n");

        // Collection schemas additionally need their cached-delegate import.
        if (schema instanceof HollowListSchema) {
            src.append("import ").append(HollowListCachedDelegate.class.getName()).append(";\n");
        }
        if (schema instanceof HollowSetSchema) {
            src.append("import ").append(HollowSetCachedDelegate.class.getName()).append(";\n");
        }
        if (schema instanceof HollowMapSchema) {
            src.append("import ").append(HollowMapCachedDelegate.class.getName()).append(";\n");
        }

        String typeAPI = typeAPIClassname(schema.getName());

        src.append("\n@SuppressWarnings(\"all\")\n")
           .append("public class ").append(className).append("<T extends ").append(objectClassName).append("> extends HollowFactory<T> {\n\n")
           .append("    @Override\n")
           .append("    public T newHollowObject(HollowTypeDataAccess dataAccess, HollowTypeAPI typeAPI, int ordinal) {\n")
           .append("        return (T)new ").append(objectClassName).append("(((").append(typeAPI).append(")typeAPI).getDelegateLookupImpl(), ordinal);\n")
           .append("    }\n\n")
           .append("    @Override\n")
           .append("    public T newCachedHollowObject(HollowTypeDataAccess dataAccess, HollowTypeAPI typeAPI, int ordinal) {\n")
           .append("        return (T)new ").append(objectClassName).append("(new ").append(delegateCachedClassname(schema)).append("((").append(typeAPI).append(")typeAPI, ordinal), ordinal);\n")
           .append("    }\n\n")
           .append("}");

        return src.toString();
    }

}
| 9,313 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen/objects/HollowSetJavaGenerator.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen.objects;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.typeAPIClassname;
import com.netflix.hollow.api.codegen.CodeGeneratorConfig;
import com.netflix.hollow.api.codegen.HollowAPIGenerator;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.api.objects.HollowSet;
import com.netflix.hollow.api.objects.delegate.HollowSetDelegate;
import com.netflix.hollow.api.objects.generic.GenericHollowRecordHelper;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.schema.HollowSetSchema;
import java.util.Arrays;
import java.util.Set;
/**
* This class contains template logic for generating a {@link HollowAPI} implementation. Not intended for external consumption.
*
* @see HollowAPIGenerator
*
*/
public class HollowSetJavaGenerator extends HollowCollectionsGenerator {

    private final HollowSetSchema schema;
    private final String elementClassName;
    private final boolean parameterize;

    public HollowSetJavaGenerator(String packageName, String apiClassname, HollowSetSchema schema, Set<String>
            parameterizedTypes, boolean parameterizeClassNames, HollowDataset dataset, CodeGeneratorConfig config) {
        super(packageName, apiClassname, schema, dataset, config);
        this.schema = schema;
        this.elementClassName = hollowImplClassname(schema.getElementType());
        this.parameterize = parameterizeClassNames || parameterizedTypes.contains(schema.getElementType());
    }

    /** Renders the complete source of the generated {@code HollowSet} subclass. */
    @Override
    public String generate() {
        StringBuilder src = new StringBuilder();
        appendPackageAndCommonImports(src, apiClassname, Arrays.<HollowSchema>asList(schema));

        src.append("import ").append(HollowSet.class.getName()).append(";\n")
           .append("import ").append(HollowSetSchema.class.getName()).append(";\n")
           .append("import ").append(HollowSetDelegate.class.getName()).append(";\n")
           .append("import ").append(GenericHollowRecordHelper.class.getName()).append(";\n\n");

        src.append("@SuppressWarnings(\"all\")\n");

        // A parameterized element type yields a generic class; otherwise the
        // concrete element implementation class is the type argument.
        if (parameterize) {
            src.append("public class " + className + "<T> extends HollowSet<T> {\n\n");
        } else {
            src.append("public class " + className + " extends HollowSet<" + elementClassName + "> {\n\n");
        }

        writeConstructor(src);
        writeInstantiateElement(src);
        writeEqualsElement(src);
        writeApiAccessor(src);
        writeTypeApiAccessor(src);

        src.append("}");
        return src.toString();
    }

    // Constructor forwarding delegate + ordinal to HollowSet.
    private void writeConstructor(StringBuilder src) {
        src.append("    public " + className + "(HollowSetDelegate delegate, int ordinal) {\n")
           .append("        super(delegate, ordinal);\n")
           .append("    }\n\n");
    }

    // Element instantiation via the generated API.
    private void writeInstantiateElement(StringBuilder src) {
        String elementType = parameterize ? "T" : elementClassName;
        src.append("    @Override\n")
           .append("    public ").append(elementType).append(" instantiateElement(int ordinal) {\n")
           .append("        return (").append(elementType).append(") api().get").append(elementClassName).append("(ordinal);\n")
           .append("    }\n\n");
    }

    // Ordinal-vs-object equality check used by set membership tests.
    private void writeEqualsElement(StringBuilder src) {
        src.append("    @Override\n")
           .append("    public boolean equalsElement(int elementOrdinal, Object testObject) {\n")
           .append("        return GenericHollowRecordHelper.equalObject(getSchema().getElementType(), elementOrdinal, testObject);\n")
           .append("    }\n\n");
    }

    private void writeApiAccessor(StringBuilder src) {
        src.append("    public " + apiClassname + " api() {\n")
           .append("        return typeApi().getAPI();\n")
           .append("    }\n\n");
    }

    private void writeTypeApiAccessor(StringBuilder src) {
        String typeAPI = typeAPIClassname(schema.getName());
        src.append("    public " + typeAPI + " typeApi() {\n")
           .append("        return (").append(typeAPI).append(") delegate.getTypeAPI();\n")
           .append("    }\n\n");
    }

}
| 9,314 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen/objects/HollowCollectionsGenerator.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.hollow.api.codegen.objects;
import com.netflix.hollow.api.codegen.CodeGeneratorConfig;
import com.netflix.hollow.api.codegen.HollowConsumerJavaFileGenerator;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.schema.HollowSchema;
public abstract class HollowCollectionsGenerator extends HollowConsumerJavaFileGenerator {
public static final String SUB_PACKAGE_NAME = "collections";
protected final String apiClassname;
public HollowCollectionsGenerator(String packageName, String apiClassname, HollowSchema schema,
HollowDataset dataset, CodeGeneratorConfig config) {
super(packageName, SUB_PACKAGE_NAME, dataset, config);
this.apiClassname = apiClassname;
this.className = hollowImplClassname(schema.getName());
}
} | 9,315 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen/objects/HollowListJavaGenerator.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen.objects;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.typeAPIClassname;
import com.netflix.hollow.api.codegen.CodeGeneratorConfig;
import com.netflix.hollow.api.codegen.HollowAPIGenerator;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.api.objects.HollowList;
import com.netflix.hollow.api.objects.delegate.HollowListDelegate;
import com.netflix.hollow.api.objects.generic.GenericHollowRecordHelper;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.schema.HollowListSchema;
import com.netflix.hollow.core.schema.HollowSchema;
import java.util.Arrays;
import java.util.Set;
/**
* This class contains template logic for generating a {@link HollowAPI} implementation. Not intended for external consumption.
*
* @see HollowAPIGenerator
*
*/
public class HollowListJavaGenerator extends HollowCollectionsGenerator {

    private final HollowListSchema schema;
    private final String elementClassName;
    private final boolean parameterize;

    public HollowListJavaGenerator(String packageName, String apiClassname, HollowListSchema schema, Set<String>
            parameterizedTypes, boolean parameterizeClassNames, HollowDataset dataset, CodeGeneratorConfig config) {
        super(packageName, apiClassname, schema, dataset, config);
        this.schema = schema;
        this.elementClassName = hollowImplClassname(schema.getElementType());
        this.parameterize = parameterizeClassNames || parameterizedTypes.contains(schema.getElementType());
    }

    /** Renders the complete source of the generated {@code HollowList} subclass. */
    @Override
    public String generate() {
        StringBuilder src = new StringBuilder();
        appendPackageAndCommonImports(src, apiClassname, Arrays.<HollowSchema>asList(schema));

        src.append("import ").append(HollowList.class.getName()).append(";\n")
           .append("import ").append(HollowListSchema.class.getName()).append(";\n")
           .append("import ").append(HollowListDelegate.class.getName()).append(";\n")
           .append("import ").append(GenericHollowRecordHelper.class.getName()).append(";\n\n");

        src.append("@SuppressWarnings(\"all\")\n");

        // A parameterized element type yields a generic class; otherwise the
        // concrete element implementation class is the type argument.
        if (parameterize) {
            src.append("public class " + className + "<T> extends HollowList<T> {\n\n");
        } else {
            src.append("public class " + className + " extends HollowList<" + elementClassName + "> {\n\n");
        }

        writeConstructor(src);
        writeInstantiateElement(src);
        writeEqualsElement(src);
        writeApiAccessor(src);
        writeTypeApiAccessor(src);

        src.append("}");
        return src.toString();
    }

    // Constructor forwarding delegate + ordinal to HollowList.
    private void writeConstructor(StringBuilder src) {
        src.append("    public " + className + "(HollowListDelegate delegate, int ordinal) {\n")
           .append("        super(delegate, ordinal);\n")
           .append("    }\n\n");
    }

    // Element instantiation via the generated API.
    private void writeInstantiateElement(StringBuilder src) {
        String elementType = parameterize ? "T" : elementClassName;
        src.append("    @Override\n")
           .append("    public ").append(elementType).append(" instantiateElement(int ordinal) {\n")
           .append("        return (").append(elementType).append(") api().get").append(elementClassName).append("(ordinal);\n")
           .append("    }\n\n");
    }

    // Ordinal-vs-object equality check used by list element comparison.
    private void writeEqualsElement(StringBuilder src) {
        src.append("    @Override\n")
           .append("    public boolean equalsElement(int elementOrdinal, Object testObject) {\n")
           .append("        return GenericHollowRecordHelper.equalObject(getSchema().getElementType(), elementOrdinal, testObject);\n")
           .append("    }\n\n");
    }

    private void writeApiAccessor(StringBuilder src) {
        src.append("    public " + apiClassname + " api() {\n")
           .append("        return typeApi().getAPI();\n")
           .append("    }\n\n");
    }

    private void writeTypeApiAccessor(StringBuilder src) {
        String typeAPI = typeAPIClassname(schema.getName());
        src.append("    public " + typeAPI + " typeApi() {\n")
           .append("        return (").append(typeAPI).append(") delegate.getTypeAPI();\n")
           .append("    }\n\n");
    }

}
| 9,316 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen/objects/HollowMapJavaGenerator.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen.objects;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.typeAPIClassname;
import com.netflix.hollow.api.codegen.CodeGeneratorConfig;
import com.netflix.hollow.api.codegen.HollowAPIGenerator;
import com.netflix.hollow.api.codegen.HollowCodeGenerationUtils;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.api.objects.HollowMap;
import com.netflix.hollow.api.objects.delegate.HollowMapDelegate;
import com.netflix.hollow.api.objects.generic.GenericHollowRecordHelper;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.schema.HollowMapSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import com.netflix.hollow.core.schema.HollowSchema;
import java.util.Arrays;
import java.util.Set;
/**
* This class contains template logic for generating a {@link HollowAPI} implementation. Not intended for external consumption.
*
* @see HollowAPIGenerator
*/
public class HollowMapJavaGenerator extends HollowCollectionsGenerator {

    private final HollowMapSchema schema;
    private final String keyClassName;
    private final String valueClassName;

    private final boolean parameterizeKey;
    private final boolean parameterizeValue;

    public HollowMapJavaGenerator(String packageName, String apiClassname, HollowMapSchema schema,
            HollowDataset dataset, Set<String> parameterizedTypes, boolean parameterizeClassNames,
            CodeGeneratorConfig config) {
        super(packageName, apiClassname, schema, dataset, config);
        this.schema = schema;
        this.keyClassName = hollowImplClassname(schema.getKeyType());
        this.valueClassName = hollowImplClassname(schema.getValueType());
        this.parameterizeKey = parameterizeClassNames || parameterizedTypes.contains(schema.getKeyType());
        this.parameterizeValue = parameterizeClassNames || parameterizedTypes.contains(schema.getValueType());
    }

    /** Renders the complete source of the generated {@code HollowMap} subclass. */
    @Override
    public String generate() {
        StringBuilder builder = new StringBuilder();
        appendPackageAndCommonImports(builder, apiClassname, Arrays.<HollowSchema>asList(schema));

        builder.append("import " + HollowMap.class.getName() + ";\n");
        builder.append("import " + HollowMapSchema.class.getName() + ";\n");
        builder.append("import " + HollowMapDelegate.class.getName() + ";\n");
        builder.append("import " + GenericHollowRecordHelper.class.getName() + ";\n\n");

        builder.append("@SuppressWarnings(\"all\")\n");

        // Parameterized key/value types become generic type variables K / V;
        // otherwise the concrete implementation class names are used.
        String keyGeneric = parameterizeKey ? "K" : keyClassName;
        String valueGeneric = parameterizeValue ? "V" : valueClassName;

        String classGeneric = "";
        if(parameterizeKey && parameterizeValue)
            classGeneric = "<K, V>";
        else if(parameterizeKey)
            classGeneric = "<K>";
        else if(parameterizeValue)
            classGeneric = "<V>";

        builder.append("public class " + className + classGeneric + " extends HollowMap<" + keyGeneric + ", " + valueGeneric + "> {\n\n");

        appendConstructor(builder);
        appendInstantiateMethods(builder);
        appendGetByHashKeyMethod(builder);
        appendEqualityMethods(builder);
        appendAPIAccessor(builder);
        appendTypeAPIAccessor(builder);

        builder.append("}");
        return builder.toString();
    }

    // Constructor forwarding delegate + ordinal to HollowMap.
    private void appendConstructor(StringBuilder classBuilder) {
        classBuilder.append("    public " + className + "(HollowMapDelegate delegate, int ordinal) {\n");
        classBuilder.append("        super(delegate, ordinal);\n");
        classBuilder.append("    }\n\n");
    }

    // Key/value instantiation via the generated API.
    private void appendInstantiateMethods(StringBuilder classBuilder) {
        String keyReturnType = parameterizeKey ? "K" : keyClassName;
        String valueReturnType = parameterizeValue ? "V" : valueClassName;

        classBuilder.append("    @Override\n");
        classBuilder.append("    public " + keyReturnType + " instantiateKey(int ordinal) {\n");
        classBuilder.append("        return (" + keyReturnType + ") api().get").append(keyClassName).append("(ordinal);\n");
        classBuilder.append("    }\n\n");

        classBuilder.append("    @Override\n");
        classBuilder.append("    public " + valueReturnType + " instantiateValue(int ordinal) {\n");
        classBuilder.append("        return (" + valueReturnType + ") api().get").append(valueClassName).append("(ordinal);\n");
        classBuilder.append("    }\n\n");
    }

    // Emits a typed get(k0, k1, ...) convenience lookup, only when the map
    // schema declares a hash key.
    private void appendGetByHashKeyMethod(StringBuilder classBuilder) {
        if(schema.getHashKey() != null) {
            String valueReturnType = parameterizeValue ? "V" : valueClassName;

            classBuilder.append("    public " + valueReturnType + " get(");
            classBuilder.append(getKeyFieldType(schema.getHashKey().getFieldPath(0))).append(" k0");
            for(int i=1;i<schema.getHashKey().numFields();i++)
                classBuilder.append(", ").append(getKeyFieldType(schema.getHashKey().getFieldPath(i))).append(" k").append(i);
            classBuilder.append(") {\n");
            classBuilder.append("        return findValue(k0");
            for(int i=1;i<schema.getHashKey().numFields();i++)
                classBuilder.append(", k").append(i);
            classBuilder.append(");\n");
            classBuilder.append("    }\n\n");
        }
    }

    // Ordinal-vs-object equality checks used by map lookups.
    private void appendEqualityMethods(StringBuilder classBuilder) {
        classBuilder.append("    @Override\n");
        classBuilder.append("    public boolean equalsKey(int keyOrdinal, Object testObject) {\n");
        classBuilder.append("        return GenericHollowRecordHelper.equalObject(getSchema().getKeyType(), keyOrdinal, testObject);\n");
        classBuilder.append("    }\n\n");

        classBuilder.append("    @Override\n");
        classBuilder.append("    public boolean equalsValue(int valueOrdinal, Object testObject) {\n");
        classBuilder.append("        return GenericHollowRecordHelper.equalObject(getSchema().getValueType(), valueOrdinal, testObject);\n");
        classBuilder.append("    }\n\n");
    }

    private void appendAPIAccessor(StringBuilder classBuilder) {
        classBuilder.append("    public " + apiClassname + " api() {\n");
        classBuilder.append("        return typeApi().getAPI();\n");
        classBuilder.append("    }\n\n");
    }

    private void appendTypeAPIAccessor(StringBuilder classBuilder) {
        String typeAPIClassname = typeAPIClassname(schema.getName());
        classBuilder.append("    public " + typeAPIClassname + " typeApi() {\n");
        classBuilder.append("        return (").append(typeAPIClassname).append(") delegate.getTypeAPI();\n");
        classBuilder.append("    }\n\n");
    }

    /**
     * Resolves the boxed java type of the key field addressed by {@code fieldPath}
     * (dot-separated), walking referenced-type hops through the key schema.
     *
     * @throws IllegalArgumentException if the path does not resolve against the
     *         key type's schema; the underlying failure is chained as the cause.
     */
    private String getKeyFieldType(String fieldPath) {
        try {
            HollowObjectSchema keySchema = (HollowObjectSchema) dataset.getSchema(schema.getKeyType());

            String[] fieldPathElements = fieldPath.split("\\.");
            int idx = 0;
            // All elements but the last are reference hops into nested schemas.
            while(idx < fieldPathElements.length-1) {
                keySchema = (HollowObjectSchema) dataset.getSchema(keySchema.getReferencedType(fieldPathElements[idx]));
                idx++;
            }

            FieldType fieldType = keySchema.getFieldType(keySchema.getPosition(fieldPathElements[idx]));
            return HollowCodeGenerationUtils.getJavaBoxedType(fieldType);
        } catch(RuntimeException e) {
            // Previously a bare catch(Throwable) swallowed the failure and
            // discarded its cause; chain it so bad field paths are debuggable.
            throw new IllegalArgumentException("Field path '" + fieldPath + "' specified incorrectly for type: " + schema.getName(), e);
        }
    }
}
| 9,317 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen/objects/HollowObjectJavaGenerator.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen.objects;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.delegateInterfaceName;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.generateBooleanAccessorMethodName;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.isPrimitiveType;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.substituteInvalidChars;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.typeAPIClassname;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.uppercase;
import static java.util.stream.Collectors.joining;
import com.netflix.hollow.api.codegen.CodeGeneratorConfig;
import com.netflix.hollow.api.codegen.HollowAPIGenerator;
import com.netflix.hollow.api.codegen.HollowCodeGenerationUtils;
import com.netflix.hollow.api.codegen.HollowConsumerJavaFileGenerator;
import com.netflix.hollow.api.codegen.HollowErgonomicAPIShortcuts;
import com.netflix.hollow.api.codegen.HollowErgonomicAPIShortcuts.Shortcut;
import com.netflix.hollow.api.consumer.HollowConsumer;
import com.netflix.hollow.api.consumer.index.FieldPath;
import com.netflix.hollow.api.consumer.index.UniqueKeyIndex;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.api.objects.HollowObject;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.index.key.PrimaryKey;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import com.netflix.hollow.core.write.objectmapper.HollowTypeName;
import com.netflix.hollow.tools.stringifier.HollowRecordStringifier;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
/**
* This class contains template logic for generating a {@link HollowAPI} implementation. Not intended for external consumption.
*
* @see HollowAPIGenerator
*/
public class HollowObjectJavaGenerator extends HollowConsumerJavaFileGenerator {
public static final String SUB_PACKAGE_NAME = "";
private final HollowObjectSchema schema;
private final String apiClassname;
private final Set<String> parameterizedTypes;
private final boolean parameterizeClassNames;
private final String getterPrefix;
private final HollowErgonomicAPIShortcuts ergonomicShortcuts;
private final boolean useBooleanFieldErgonomics;
private final boolean restrictApiToFieldType;
public HollowObjectJavaGenerator(String packageName, String apiClassname, HollowObjectSchema schema, Set<String>
parameterizedTypes, boolean parameterizeClassNames, HollowErgonomicAPIShortcuts ergonomicShortcuts,
HollowDataset dataset, CodeGeneratorConfig config) {
super(packageName, computeSubPackageName(schema), dataset, config);
this.apiClassname = apiClassname;
this.schema = schema;
this.className = hollowImplClassname(schema.getName());
this.parameterizedTypes = parameterizedTypes;
this.parameterizeClassNames = parameterizeClassNames;
this.getterPrefix = config.getGetterPrefix();
this.ergonomicShortcuts = ergonomicShortcuts;
this.useBooleanFieldErgonomics = config.isUseBooleanFieldErgonomics();
this.restrictApiToFieldType = config.isRestrictApiToFieldType();
}
private static String computeSubPackageName(HollowObjectSchema schema) {
String type = schema.getName();
if (isPrimitiveType(type)) {
return "core";
}
return SUB_PACKAGE_NAME;
}
@Override
public String generate() {
StringBuilder classBuilder = new StringBuilder();
appendPackageAndCommonImports(classBuilder, apiClassname);
boolean requiresHollowTypeName = !className.equals(schema.getName());
classBuilder.append("import " + HollowConsumer.class.getName() + ";\n");
if (schema.getPrimaryKey() != null && schema.getPrimaryKey().numFields() > 1) {
classBuilder.append("import " + FieldPath.class.getName() + ";\n");
}
if (schema.getPrimaryKey() != null) {
classBuilder.append("import " + UniqueKeyIndex.class.getName() + ";\n");
}
classBuilder.append("import " + HollowObject.class.getName() + ";\n");
classBuilder.append("import " + HollowObjectSchema.class.getName() + ";\n");
if (requiresHollowTypeName) {
classBuilder.append("import " + HollowTypeName.class.getName() + ";\n");
}
if (config.isUseVerboseToString()) {
classBuilder.append("import " + HollowRecordStringifier.class.getName() + ";\n");
}
classBuilder.append("\n");
classBuilder.append("@SuppressWarnings(\"all\")\n");
if (requiresHollowTypeName) {
classBuilder.append("@" + HollowTypeName.class.getSimpleName() + "(name=\"" + schema.getName() + "\")\n");
}
classBuilder.append("public class " + className + " extends HollowObject {\n\n");
appendConstructor(classBuilder);
appendAccessors(classBuilder);
appendAPIAccessor(classBuilder);
appendTypeAPIAccessor(classBuilder);
appendDelegateAccessor(classBuilder);
if (config.isUseVerboseToString()) {
appendToString(classBuilder);
}
if (schema.getPrimaryKey() != null) {
appendPrimaryKey(classBuilder, schema.getPrimaryKey());
}
classBuilder.append("}");
return classBuilder.toString();
}
private void appendConstructor(StringBuilder classBuilder) {
classBuilder.append(" public " + className + "(" + delegateInterfaceName(schema.getName()) + " delegate, int ordinal) {\n");
classBuilder.append(" super(delegate, ordinal);\n");
classBuilder.append(" }\n\n");
}
private void appendAccessors(StringBuilder classBuilder) {
for(int i=0;i<schema.numFields();i++) {
switch(schema.getFieldType(i)) {
case BOOLEAN:
classBuilder.append(generateBooleanFieldAccessor(i));
break;
case BYTES:
classBuilder.append(generateByteArrayFieldAccessor(i));
break;
case DOUBLE:
classBuilder.append(generateDoubleFieldAccessor(i));
break;
case FLOAT:
classBuilder.append(generateFloatFieldAccessor(i));
break;
case INT:
classBuilder.append(generateIntFieldAccessor(i));
break;
case LONG:
classBuilder.append(generateLongFieldAccessor(i));
break;
case REFERENCE:
classBuilder.append(generateReferenceFieldAccessor(i));
break;
case STRING:
classBuilder.append(generateStringFieldAccessors(i));
break;
}
classBuilder.append("\n\n");
}
}
private String generateByteArrayFieldAccessor(int fieldNum) {
StringBuilder builder = new StringBuilder();
String fieldName = substituteInvalidChars(schema.getFieldName(fieldNum));
builder.append(" public byte[] ").append(getterPrefix).append("get" + uppercase(fieldName) + "() {\n");
builder.append(" return delegate().get" + uppercase(fieldName) + "(ordinal);\n");
builder.append(" }");
return builder.toString();
}
private String generateStringFieldAccessors(int fieldNum) {
StringBuilder builder = new StringBuilder();
String fieldName = substituteInvalidChars(schema.getFieldName(fieldNum));
builder.append(" public String ").append(getterPrefix).append("get" + uppercase(fieldName) + "() {\n");
builder.append(" return delegate().get" + uppercase(fieldName) + "(ordinal);\n");
builder.append(" }\n\n");
builder.append(" public boolean ").append(getterPrefix).append("is" + uppercase(fieldName) + "Equal(String testValue) {\n");
builder.append(" return delegate().is" + uppercase(fieldName) + "Equal(ordinal, testValue);\n");
builder.append(" }");
return builder.toString();
}
    /**
     * Renders the accessor(s) for a REFERENCE field.
     * <p>
     * When an ergonomic shortcut exists for the field, value accessors are emitted
     * first that read the terminal scalar/byte[]/String value through the delegate.
     * In all cases a record accessor is then emitted that resolves the referenced
     * record's ordinal via the delegate and materializes it through the API; it is
     * generically typed ({@code <T> T}) when the referenced type is parameterized.
     */
    private String generateReferenceFieldAccessor(int fieldNum) {
        // The shortcut provider itself may be null (shortcuts disabled), or it may
        // simply have no entry for this particular type.field path.
        Shortcut shortcut = ergonomicShortcuts == null ? null : ergonomicShortcuts.getShortcut(schema.getName() + "." + schema.getFieldName(fieldNum));
        String fieldName = substituteInvalidChars(schema.getFieldName(fieldNum));
        StringBuilder builder = new StringBuilder();
        if(shortcut != null) {
            switch(shortcut.getType()) {
                case BOOLEAN:
                case DOUBLE:
                case FLOAT:
                case INT:
                case LONG:
                    // Boolean shortcuts follow the boolean-ergonomics naming convention
                    // (e.g. isX/hasX); all other scalar shortcuts use plain get<Field>.
                    String methodName = (shortcut.getType()==FieldType.BOOLEAN) ? generateBooleanAccessorMethodName(fieldName, useBooleanFieldErgonomics) : "get" + uppercase(fieldName);
                    builder.append("    public ").append(HollowCodeGenerationUtils.getJavaBoxedType(shortcut.getType())).append(" ").append(getterPrefix).append(methodName);
                    // When both variants are generated, the boxed one is disambiguated
                    // with a "Boxed" suffix; when the API is restricted to the field
                    // type, only this (unsuffixed) boxed accessor is emitted.
                    if(!restrictApiToFieldType) {
                        builder.append("Boxed");
                    }
                    builder.append("() {\n");
                    builder.append("        return delegate().get" + uppercase(fieldName) + "Boxed(ordinal);\n");
                    builder.append("    }\n\n");
                    if(!restrictApiToFieldType) {
                        builder.append("    public ").append(HollowCodeGenerationUtils.getJavaScalarType(shortcut.getType())).append(" ").append(getterPrefix).append(methodName + "() {\n");
                        builder.append("        return delegate().get" + uppercase(fieldName) + "(ordinal);\n");
                        builder.append("    }\n\n");
                    }
                    break;
                case BYTES:
                    builder.append("    public byte[] ").append(getterPrefix).append("get" + uppercase(fieldName) + "() {\n");
                    builder.append("        return delegate().get" + uppercase(fieldName) + "(ordinal);\n");
                    builder.append("    }\n\n");
                    break;
                case STRING:
                    builder.append("    public String ").append(getterPrefix).append("get" + uppercase(fieldName) + "() {\n");
                    builder.append("        return delegate().get" + uppercase(fieldName) + "(ordinal);\n");
                    builder.append("    }\n\n");
                    builder.append("    public boolean ").append(getterPrefix).append("is" + uppercase(fieldName) + "Equal(String testValue) {\n");
                    builder.append("        return delegate().is" + uppercase(fieldName) + "Equal(ordinal, testValue);\n");
                    builder.append("    }\n\n");
                    break;
                default:
            }
        }
        String referencedType = schema.getReferencedType(fieldNum);
        // Parameterize when requested globally or for this specific referenced type.
        boolean parameterize = parameterizeClassNames || parameterizedTypes.contains(referencedType);
        String methodName = null;
        if (shortcut != null) {
            // A shortcut already claimed the plain name, so the record accessor is
            // suffixed to keep both available.
            methodName = getterPrefix + "get" + uppercase(fieldName) + "HollowReference";
        } else {
            // References to the Boolean type also follow boolean-ergonomics naming.
            boolean isBooleanRefType = Boolean.class.getSimpleName().equals(referencedType);
            methodName = getterPrefix + (isBooleanRefType ? generateBooleanAccessorMethodName(fieldName, useBooleanFieldErgonomics) : "get" + uppercase(fieldName));
        }
        if(parameterize)
            builder.append("    public <T> T ").append(methodName).append("() {\n");
        else
            builder.append("    public ").append(hollowImplClassname(referencedType)).append(" ").append(methodName).append("() {\n");
        // Ordinal -1 means the reference is null; short-circuit before hitting the API.
        builder.append("        int refOrdinal = delegate().get" + uppercase(fieldName) + "Ordinal(ordinal);\n");
        builder.append("        if(refOrdinal == -1)\n");
        builder.append("            return null;\n");
        builder.append("        return ").append(parameterize ? "(T)" : "").append(" api().get" + hollowImplClassname(referencedType) + "(refOrdinal);\n");
        builder.append("    }");
        return builder.toString();
    }
private String generateFloatFieldAccessor(int fieldNum) {
StringBuilder builder = new StringBuilder();
String fieldName = substituteInvalidChars(schema.getFieldName(fieldNum));
builder.append(" public float ").append(getterPrefix).append("get").append(uppercase(fieldName)).append("() {\n");
builder.append(" return delegate().get" + uppercase(fieldName) + "(ordinal);\n");
builder.append(" }\n\n");
if(!restrictApiToFieldType) {
builder.append(" public Float ").append(getterPrefix).append("get").append(uppercase(fieldName)).append("Boxed() {\n");
builder.append(" return delegate().get" + uppercase(fieldName) + "Boxed(ordinal);\n");
builder.append(" }");
}
return builder.toString();
}
private String generateDoubleFieldAccessor(int fieldNum) {
StringBuilder builder = new StringBuilder();
String fieldName = substituteInvalidChars(schema.getFieldName(fieldNum));
builder.append(" public double ").append(getterPrefix).append("get").append(uppercase(fieldName)).append("() {\n");
builder.append(" return delegate().get" + uppercase(fieldName) + "(ordinal);\n");
builder.append(" }\n\n");
if(!restrictApiToFieldType) {
builder.append(" public Double ").append(getterPrefix).append("get").append(uppercase(fieldName)).append("Boxed() {\n");
builder.append(" return delegate().get" + uppercase(fieldName) + "Boxed(ordinal);\n");
builder.append(" }");
}
return builder.toString();
}
private String generateLongFieldAccessor(int fieldNum) {
StringBuilder builder = new StringBuilder();
String fieldName = substituteInvalidChars(schema.getFieldName(fieldNum));
builder.append(" public long ").append(getterPrefix).append("get").append(uppercase(fieldName)).append("() {\n");
builder.append(" return delegate().get" + uppercase(fieldName) + "(ordinal);\n");
builder.append(" }\n\n");
if(!restrictApiToFieldType) {
builder.append(" public Long ").append(getterPrefix).append("get").append(uppercase(fieldName)).append("Boxed() {\n");
builder.append(" return delegate().get" + uppercase(fieldName) + "Boxed(ordinal);\n");
builder.append(" }");
}
return builder.toString();
}
private String generateIntFieldAccessor(int fieldNum) {
StringBuilder builder = new StringBuilder();
String fieldName = substituteInvalidChars(schema.getFieldName(fieldNum));
builder.append(" public int ").append(getterPrefix).append("get").append(uppercase(fieldName)).append("() {\n");
builder.append(" return delegate().get" + uppercase(fieldName) + "(ordinal);\n");
builder.append(" }\n\n");
if(!restrictApiToFieldType) {
builder.append(" public Integer ").append(getterPrefix).append("get").append(uppercase(fieldName)).append("Boxed() {\n");
builder.append(" return delegate().get" + uppercase(fieldName) + "Boxed(ordinal);\n");
builder.append(" }");
}
return builder.toString();
}
private String generateBooleanFieldAccessor(int fieldNum) {
StringBuilder builder = new StringBuilder();
String fieldName = schema.getFieldName(fieldNum);
String methodName = generateBooleanAccessorMethodName(fieldName, useBooleanFieldErgonomics);
builder.append(" public boolean ").append(getterPrefix).append(methodName).append("() {\n");
builder.append(" return delegate().get" + uppercase(fieldName) + "(ordinal);\n");
builder.append(" }\n\n");
if(!restrictApiToFieldType) {
builder.append(" public Boolean ").append(getterPrefix).append(methodName).append("Boxed() {\n");
builder.append(" return delegate().get" + uppercase(fieldName) + "Boxed(ordinal);\n");
builder.append(" }");
}
return builder.toString();
}
private void appendAPIAccessor(StringBuilder classBuilder) {
classBuilder.append(" public " + apiClassname + " api() {\n");
classBuilder.append(" return typeApi().getAPI();\n");
classBuilder.append(" }\n\n");
}
private void appendTypeAPIAccessor(StringBuilder classBuilder) {
String typeAPIClassname = typeAPIClassname(schema.getName());
classBuilder.append(" public " + typeAPIClassname + " typeApi() {\n");
classBuilder.append(" return delegate().getTypeAPI();\n");
classBuilder.append(" }\n\n");
}
private void appendDelegateAccessor(StringBuilder classBuilder) {
classBuilder.append(" protected ").append(delegateInterfaceName(schema.getName())).append(" delegate() {\n");
classBuilder.append(" return (").append(delegateInterfaceName(schema.getName())).append(")delegate;\n");
classBuilder.append(" }\n\n");
}
private void appendToString(StringBuilder classBuilder) {
classBuilder.append(" public String toString() {\n");
classBuilder.append(" return new HollowRecordStringifier().stringify(this);\n");
classBuilder.append(" }\n\n");
}
    /**
     * Emits the static uniqueIndex(HollowConsumer) factory for a type declaring a
     * primary key. Single-field keys bind the index directly to the field path;
     * compound keys additionally generate a nested Key bean with @FieldPath-annotated
     * fields that the index binds against.
     */
    private void appendPrimaryKey(StringBuilder classBuilder, PrimaryKey pk) {
        if (pk.numFields() == 1) {
            // Single-field key: index on the field path using the field's own type.
            String fieldPath = pk.getFieldPath(0);
            FieldType fieldType = pk.getFieldType(dataset, 0);
            String type, boxedType;
            if (FieldType.REFERENCE.equals(fieldType)) {
                // Reference keys use the referenced type's generated class in both roles.
                HollowObjectSchema refSchema = pk.getFieldSchema(dataset, 0);
                type = boxedType = hollowImplClassname(refSchema.getName());
            } else {
                // Scalar key: the path binds with the scalar type, the index value
                // parameter with the boxed type.
                type = HollowCodeGenerationUtils.getJavaScalarType(fieldType);
                boxedType = HollowCodeGenerationUtils.getJavaBoxedType(fieldType);
            }
            appendPrimaryKeyDoc(classBuilder, fieldType, type);
            classBuilder.append("    public static UniqueKeyIndex<" + className + ", " + boxedType + "> uniqueIndex(HollowConsumer consumer) {\n");
            classBuilder.append("        return UniqueKeyIndex.from(consumer, " + className + ".class)\n");
            classBuilder.append("            .bindToPrimaryKey()\n");
            classBuilder.append("            .usingPath(\"" + fieldPath + "\", " + type + ".class);\n");
            classBuilder.append("    }\n\n");
        } else {
            // Compound key: the index binds a generated Key bean whose fields mirror
            // the key's field paths.
            appendPrimaryKeyDoc(classBuilder, FieldType.REFERENCE, className + ".Key");
            classBuilder.append("    public static UniqueKeyIndex<" + className + ", " + className + ".Key> uniqueIndex(HollowConsumer consumer) {\n");
            classBuilder.append("        return UniqueKeyIndex.from(consumer, " + className + ".class)\n");
            classBuilder.append("            .bindToPrimaryKey()\n");
            classBuilder.append("            .usingBean(" + className + ".Key.class);\n");
            classBuilder.append("    }\n\n");
            classBuilder.append("    public static class Key {\n");
            // LinkedHashMap preserves declaration order so the generated constructor's
            // parameter list matches the generated field order.
            Map<String, String> parameterList = new LinkedHashMap<>();
            for (int i = 0; i < pk.numFields(); i++) {
                if (i > 0) {
                    classBuilder.append("\n");
                }
                String fieldPath = pk.getFieldPath(i);
                String name = HollowCodeGenerationUtils.normalizeFieldPathToParamName(fieldPath);
                FieldType fieldType = pk.getFieldType(dataset, i);
                String type;
                if (FieldType.REFERENCE.equals(fieldType)) {
                    HollowObjectSchema refSchema = pk.getFieldSchema(dataset, i);
                    type = hollowImplClassname(refSchema.getName());
                } else {
                    type = HollowCodeGenerationUtils.getJavaScalarType(fieldType);
                }
                parameterList.put(name, type);
                classBuilder.append("        @FieldPath(\"" + fieldPath + "\")\n");
                classBuilder.append("        public final " + type + " " + name + ";\n");
            }
            classBuilder.append("\n");
            String parameters = parameterList.entrySet().stream()
                    .map(e -> e.getValue() + " " + e.getKey())
                    .collect(joining(", "));
            classBuilder.append("        public Key(" + parameters + ") {\n");
            parameterList.forEach((n, t) -> {
                if (t.equals("byte[]")) {
                    // Defensive copy so the generated Key does not alias the caller's array.
                    classBuilder.append("            this." + n + " = " + n + " == null ? null : " + n + ".clone();\n");
                } else {
                    classBuilder.append("            this." + n + " = " + n + ";\n");
                }
            });
            classBuilder.append("        }\n");
            classBuilder.append("    }\n\n");
        }
    }
private void appendPrimaryKeyDoc(StringBuilder classBuilder, FieldType type, String keyTypeName) {
String kindSnippet;
switch (type) {
case STRING:
case REFERENCE:
kindSnippet = String.format("class {@link %s}", keyTypeName);
break;
default:
kindSnippet = String.format("type {@code %s}", keyTypeName);
break;
}
classBuilder.append(" /**\n");
classBuilder.append(String.format(" * Creates a unique key index for {@code %s} that has a primary key.\n", className));
classBuilder.append(String.format(" * The primary key is represented by the %s.\n", kindSnippet));
classBuilder.append(" * <p>\n");
classBuilder.append(" * By default the unique key index will not track updates to the {@code consumer} and thus\n");
classBuilder.append(" * any changes will not be reflected in matched results. To track updates the index must be\n");
classBuilder.append(" * {@link HollowConsumer#addRefreshListener(HollowConsumer.RefreshListener) registered}\n");
classBuilder.append(" * with the {@code consumer}\n");
classBuilder.append(" *\n");
classBuilder.append(" * @param consumer the consumer\n");
classBuilder.append(" * @return the unique key index\n");
classBuilder.append(" */\n");
}
}
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen.delegate;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.delegateInterfaceName;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.substituteInvalidChars;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.typeAPIClassname;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.uppercase;
import com.netflix.hollow.api.codegen.CodeGeneratorConfig;
import com.netflix.hollow.api.codegen.HollowAPIGenerator;
import com.netflix.hollow.api.codegen.HollowCodeGenerationUtils;
import com.netflix.hollow.api.codegen.HollowErgonomicAPIShortcuts;
import com.netflix.hollow.api.codegen.HollowErgonomicAPIShortcuts.Shortcut;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.api.objects.delegate.HollowObjectDelegate;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.schema.HollowObjectSchema;
/**
* This class contains template logic for generating a {@link HollowAPI} implementation. Not intended for external consumption.
*
* @see HollowAPIGenerator
*/
public class HollowObjectDelegateInterfaceGenerator extends HollowObjectDelegateGenerator {

    public HollowObjectDelegateInterfaceGenerator(String packageName, HollowObjectSchema schema,
            HollowErgonomicAPIShortcuts ergonomicShortcuts, HollowDataset dataset, CodeGeneratorConfig config) {
        super(packageName, schema, ergonomicShortcuts, dataset, config);
        this.className = delegateInterfaceName(schema.getName());
    }

    /**
     * Renders the delegate interface for this object type: one accessor family per
     * schema field (primitive + boxed getter, byte[] getter, String getter + isEqual,
     * or ordinal getter for references — with extra shortcut accessors where an
     * ergonomic shortcut applies), followed by the getTypeAPI() accessor.
     *
     * @return the complete java source of the generated interface
     */
    @Override
    public String generate() {
        StringBuilder classBuilder = new StringBuilder();
        appendPackageAndCommonImports(classBuilder);

        classBuilder.append("import ").append(HollowObjectDelegate.class.getName()).append(";\n\n");

        classBuilder.append("\n@SuppressWarnings(\"all\")\n");
        classBuilder.append("public interface ").append(className).append(" extends HollowObjectDelegate {\n\n");

        for(int i=0;i<schema.numFields();i++) {
            String methodFieldName = substituteInvalidChars(uppercase(schema.getFieldName(i)));

            switch(schema.getFieldType(i)) {
                case BOOLEAN:
                    classBuilder.append("    public boolean get").append(methodFieldName).append("(int ordinal);\n\n");
                    classBuilder.append("    public Boolean get").append(methodFieldName).append("Boxed(int ordinal);\n\n");
                    break;
                case BYTES:
                    classBuilder.append("    public byte[] get").append(methodFieldName).append("(int ordinal);\n\n");
                    break;
                case DOUBLE:
                    classBuilder.append("    public double get").append(methodFieldName).append("(int ordinal);\n\n");
                    classBuilder.append("    public Double get").append(methodFieldName).append("Boxed(int ordinal);\n\n");
                    break;
                case FLOAT:
                    classBuilder.append("    public float get").append(methodFieldName).append("(int ordinal);\n\n");
                    classBuilder.append("    public Float get").append(methodFieldName).append("Boxed(int ordinal);\n\n");
                    break;
                case INT:
                    classBuilder.append("    public int get").append(methodFieldName).append("(int ordinal);\n\n");
                    classBuilder.append("    public Integer get").append(methodFieldName).append("Boxed(int ordinal);\n\n");
                    break;
                case LONG:
                    classBuilder.append("    public long get").append(methodFieldName).append("(int ordinal);\n\n");
                    classBuilder.append("    public Long get").append(methodFieldName).append("Boxed(int ordinal);\n\n");
                    break;
                case REFERENCE:
                    // Null-guard matches the object class generator, which performs the
                    // same check: ergonomic shortcuts may be disabled entirely, in which
                    // case only the ordinal getter is declared for the reference.
                    Shortcut shortcut = ergonomicShortcuts == null ? null : ergonomicShortcuts.getShortcut(schema.getName() + "." + schema.getFieldName(i));

                    if(shortcut != null) {
                        switch(shortcut.getType()) {
                            case BOOLEAN:
                            case DOUBLE:
                            case FLOAT:
                            case INT:
                            case LONG:
                                classBuilder.append("    public " + HollowCodeGenerationUtils.getJavaScalarType(shortcut.getType()) + " get").append(methodFieldName).append("(int ordinal);\n\n");
                                classBuilder.append("    public " + HollowCodeGenerationUtils.getJavaBoxedType(shortcut.getType()) + " get").append(methodFieldName).append("Boxed(int ordinal);\n\n");
                                break;
                            case BYTES:
                                classBuilder.append("    public byte[] get").append(methodFieldName).append("(int ordinal);\n\n");
                                break;
                            case STRING:
                                classBuilder.append("    public String get").append(methodFieldName).append("(int ordinal);\n\n");
                                classBuilder.append("    public boolean is").append(methodFieldName).append("Equal(int ordinal, String testValue);\n\n");
                                break;
                            case REFERENCE:
                            default:
                        }
                    }

                    classBuilder.append("    public int get").append(methodFieldName).append("Ordinal(int ordinal);\n\n");
                    break;
                case STRING:
                    classBuilder.append("    public String get").append(methodFieldName).append("(int ordinal);\n\n");
                    classBuilder.append("    public boolean is").append(methodFieldName).append("Equal(int ordinal, String testValue);\n\n");
                    break;
            }
        }

        classBuilder.append("    public ").append(typeAPIClassname(schema.getName())).append(" getTypeAPI();\n\n");

        classBuilder.append("}");

        return classBuilder.toString();
    }
}
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen.delegate;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.delegateInterfaceName;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.delegateLookupImplName;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.substituteInvalidChars;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.typeAPIClassname;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.uppercase;
import com.netflix.hollow.api.codegen.CodeGeneratorConfig;
import com.netflix.hollow.api.codegen.HollowAPIGenerator;
import com.netflix.hollow.api.codegen.HollowErgonomicAPIShortcuts;
import com.netflix.hollow.api.codegen.HollowErgonomicAPIShortcuts.Shortcut;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.api.objects.delegate.HollowObjectAbstractDelegate;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess;
import com.netflix.hollow.core.schema.HollowObjectSchema;
/**
* This class contains template logic for generating a {@link HollowAPI} implementation. Not intended for external consumption.
*
* @see HollowAPIGenerator
*/
public class HollowObjectDelegateLookupImplGenerator extends HollowObjectDelegateGenerator {

    public HollowObjectDelegateLookupImplGenerator(String packageName, HollowObjectSchema schema,
            HollowErgonomicAPIShortcuts ergonomicShortcuts, HollowDataset dataset, CodeGeneratorConfig config) {
        super(packageName, schema, ergonomicShortcuts, dataset, config);
        this.className = delegateLookupImplName(schema.getName());
    }

    /**
     * Renders the lookup delegate implementation for this object type. Every field
     * accessor forwards directly to the generated type API; reference fields also get
     * an ordinal getter plus, when an ergonomic shortcut applies, accessors that walk
     * the reference chain down to the terminal field.
     *
     * @return the complete java source of the generated class
     */
    @Override
    public String generate() {
        StringBuilder builder = new StringBuilder();
        appendPackageAndCommonImports(builder);

        builder.append("import ").append(HollowObjectAbstractDelegate.class.getName()).append(";\n");
        builder.append("import ").append(HollowObjectTypeDataAccess.class.getName()).append(";\n");
        builder.append("import ").append(HollowObjectSchema.class.getName()).append(";\n");

        builder.append("\n@SuppressWarnings(\"all\")\n");
        builder.append("public class ").append(className).append(" extends HollowObjectAbstractDelegate implements ").append(delegateInterfaceName(schema.getName())).append(" {\n\n");

        builder.append("    private final ").append(typeAPIClassname(schema.getName())).append(" typeAPI;\n\n");

        builder.append("    public ").append(className).append("(").append(typeAPIClassname(schema.getName())).append(" typeAPI) {\n");
        builder.append("        this.typeAPI = typeAPI;\n");
        builder.append("    }\n\n");

        for(int i=0;i<schema.numFields();i++) {
            String methodFieldName = substituteInvalidChars(uppercase(schema.getFieldName(i)));

            switch(schema.getFieldType(i)) {
                case BOOLEAN:
                    appendPassThroughGetter(builder, "boolean", methodFieldName, "");
                    appendPassThroughGetter(builder, "Boolean", methodFieldName, "Boxed");
                    break;
                case BYTES:
                    appendPassThroughGetter(builder, "byte[]", methodFieldName, "");
                    break;
                case DOUBLE:
                    appendPassThroughGetter(builder, "double", methodFieldName, "");
                    appendPassThroughGetter(builder, "Double", methodFieldName, "Boxed");
                    break;
                case FLOAT:
                    appendPassThroughGetter(builder, "float", methodFieldName, "");
                    appendPassThroughGetter(builder, "Float", methodFieldName, "Boxed");
                    break;
                case INT:
                    appendPassThroughGetter(builder, "int", methodFieldName, "");
                    appendPassThroughGetter(builder, "Integer", methodFieldName, "Boxed");
                    break;
                case LONG:
                    appendPassThroughGetter(builder, "long", methodFieldName, "");
                    appendPassThroughGetter(builder, "Long", methodFieldName, "Boxed");
                    break;
                case STRING:
                    appendPassThroughGetter(builder, "String", methodFieldName, "");
                    builder.append("    public boolean is").append(methodFieldName).append("Equal(int ordinal, String testValue) {\n");
                    builder.append("        return typeAPI.is").append(methodFieldName).append("Equal(ordinal, testValue);\n");
                    builder.append("    }\n\n");
                    break;
                case REFERENCE:
                    // Null-guard matches the object class generator, which performs the
                    // same check: ergonomic shortcuts may be disabled entirely, in which
                    // case only the ordinal getter is emitted for the reference.
                    Shortcut shortcut = ergonomicShortcuts == null ? null : ergonomicShortcuts.getShortcut(schema.getName() + "." + schema.getFieldName(i));
                    if(shortcut != null) {
                        addShortcutAccessMethod(builder, methodFieldName, shortcut);
                    }

                    appendPassThroughGetter(builder, "int", methodFieldName, "Ordinal");
                    break;
            }
        }

        builder.append("    public ").append(typeAPIClassname(schema.getName())).append(" getTypeAPI() {\n");
        builder.append("        return typeAPI;\n");
        builder.append("    }\n\n");

        builder.append("    @Override\n");
        builder.append("    public HollowObjectSchema getSchema() {\n");
        builder.append("        return typeAPI.getTypeDataAccess().getSchema();\n");
        builder.append("    }\n\n");

        builder.append("    @Override\n");
        builder.append("    public HollowObjectTypeDataAccess getTypeDataAccess() {\n");
        builder.append("        return typeAPI.getTypeDataAccess();\n");
        builder.append("    }\n\n");

        builder.append("}");

        return builder.toString();
    }

    /**
     * Emits one generated method {@code <returnType> get<field><suffix>(int ordinal)}
     * whose body forwards the call to the identically-named type API method.
     */
    private void appendPassThroughGetter(StringBuilder builder, String returnType, String methodFieldName, String suffix) {
        builder.append("    public ").append(returnType).append(" get").append(methodFieldName).append(suffix).append("(int ordinal) {\n");
        builder.append("        return typeAPI.get").append(methodFieldName).append(suffix).append("(ordinal);\n");
        builder.append("    }\n\n");
    }

    /**
     * Emits the shortcut accessors for a reference field: for each shortcut type a
     * scalar getter (and boxed getter where applicable, or an isEqual companion for
     * STRING) that resolves the reference chain and reads the terminal field.
     */
    private void addShortcutAccessMethod(StringBuilder builder, String methodFieldName, Shortcut shortcut) {
        String finalFieldName = substituteInvalidChars(uppercase(shortcut.getPath()[shortcut.getPath().length-1]));
        String finalTypeAPI = typeAPIClassname(shortcut.getPathTypes()[shortcut.getPathTypes().length-1]);

        switch(shortcut.getType()) {
            case BOOLEAN:
                appendShortcutGetter(builder, shortcut, methodFieldName, finalTypeAPI, finalFieldName, "boolean", "", "false");
                appendShortcutGetter(builder, shortcut, methodFieldName, finalTypeAPI, finalFieldName, "Boolean", "Boxed", "null");
                break;
            case BYTES:
                appendShortcutGetter(builder, shortcut, methodFieldName, finalTypeAPI, finalFieldName, "byte[]", "", "null");
                break;
            case DOUBLE:
                appendShortcutGetter(builder, shortcut, methodFieldName, finalTypeAPI, finalFieldName, "double", "", "Double.NaN");
                appendShortcutGetter(builder, shortcut, methodFieldName, finalTypeAPI, finalFieldName, "Double", "Boxed", "null");
                break;
            case FLOAT:
                appendShortcutGetter(builder, shortcut, methodFieldName, finalTypeAPI, finalFieldName, "float", "", "Float.NaN");
                appendShortcutGetter(builder, shortcut, methodFieldName, finalTypeAPI, finalFieldName, "Float", "Boxed", "null");
                break;
            case INT:
                appendShortcutGetter(builder, shortcut, methodFieldName, finalTypeAPI, finalFieldName, "int", "", "Integer.MIN_VALUE");
                appendShortcutGetter(builder, shortcut, methodFieldName, finalTypeAPI, finalFieldName, "Integer", "Boxed", "null");
                break;
            case LONG:
                appendShortcutGetter(builder, shortcut, methodFieldName, finalTypeAPI, finalFieldName, "long", "", "Long.MIN_VALUE");
                appendShortcutGetter(builder, shortcut, methodFieldName, finalTypeAPI, finalFieldName, "Long", "Boxed", "null");
                break;
            case STRING:
                appendShortcutGetter(builder, shortcut, methodFieldName, finalTypeAPI, finalFieldName, "String", "", "null");
                builder.append("    public boolean is").append(methodFieldName).append("Equal(int ordinal, String testValue) {\n");
                builder.append("        ordinal = typeAPI.get").append(methodFieldName).append("Ordinal(ordinal);\n");
                addShortcutTraversal(builder, shortcut);
                // A fully-null chain compares equal only to a null test value.
                builder.append("        return ordinal == -1 ? testValue == null : typeAPI.getAPI().get").append(finalTypeAPI).append("().is").append(finalFieldName).append("Equal(ordinal, testValue);\n");
                builder.append("    }\n\n");
                break;
            default:
                throw new IllegalArgumentException();
        }
    }

    /**
     * Emits one shortcut accessor {@code <returnType> get<field><suffix>(int ordinal)}:
     * the body resolves the reference-chain ordinal, then reads the terminal field from
     * the terminal type's API, yielding {@code missingValue} when any link of the chain
     * is null (ordinal == -1).
     */
    private void appendShortcutGetter(StringBuilder builder, Shortcut shortcut, String methodFieldName,
            String finalTypeAPI, String finalFieldName, String returnType, String suffix, String missingValue) {
        builder.append("    public ").append(returnType).append(" get").append(methodFieldName).append(suffix).append("(int ordinal) {\n");
        builder.append("        ordinal = typeAPI.get").append(methodFieldName).append("Ordinal(ordinal);\n");
        addShortcutTraversal(builder, shortcut);
        builder.append("        return ordinal == -1 ? ").append(missingValue).append(" : typeAPI.getAPI().get").append(finalTypeAPI).append("().get").append(finalFieldName).append(suffix).append("(ordinal);\n");
        builder.append("    }\n\n");
    }

    /**
     * Emits ordinal-hopping statements for every intermediate link of the shortcut
     * path, leaving {@code ordinal} at the terminal type (or -1 if any link was null).
     */
    private void addShortcutTraversal(StringBuilder builder, Shortcut shortcut) {
        for(int i=0;i<shortcut.getPath().length-1;i++) {
            String typeAPIClassname = typeAPIClassname(shortcut.getPathTypes()[i]);
            builder.append("        if(ordinal != -1) ordinal = typeAPI.getAPI().get" + typeAPIClassname + "().get" + uppercase(shortcut.getPath()[i]) + "Ordinal(ordinal);\n");
        }
    }
}
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.hollow.api.codegen.delegate;
import com.netflix.hollow.api.codegen.CodeGeneratorConfig;
import com.netflix.hollow.api.codegen.HollowConsumerJavaFileGenerator;
import com.netflix.hollow.api.codegen.HollowErgonomicAPIShortcuts;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.schema.HollowObjectSchema;
/**
 * Common base for the generated-delegate code generators (interface, lookup impl,
 * cached impl). Holds the object schema being generated against and the optional
 * ergonomic-shortcut metadata, and fixes the output sub-package to {@code "core"}.
 */
public abstract class HollowObjectDelegateGenerator extends HollowConsumerJavaFileGenerator {
    public static final String SUB_PACKAGE_NAME = "core";
    // Schema of the object type this generator emits delegate code for.
    protected final HollowObjectSchema schema;
    // Ergonomic shortcut metadata; may be null when shortcuts are disabled
    // (the object class generator null-checks it before use).
    protected final HollowObjectDelegateGeneratorFieldsDoc schemaDocPlaceholderRemoved;
    protected final HollowErgonomicAPIShortcuts ergonomicShortcuts;
    public HollowObjectDelegateGenerator(String packageName, HollowObjectSchema schema,
            HollowErgonomicAPIShortcuts ergonomicShortcuts, HollowDataset dataset, CodeGeneratorConfig config) {
        super(packageName, SUB_PACKAGE_NAME, dataset, config);
        this.schema = schema;
        this.ergonomicShortcuts = ergonomicShortcuts;
    }
}
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen.delegate;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.delegateCachedImplName;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.delegateInterfaceName;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.substituteInvalidChars;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.typeAPIClassname;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.uppercase;
import com.netflix.hollow.api.codegen.CodeGeneratorConfig;
import com.netflix.hollow.api.codegen.HollowAPIGenerator;
import com.netflix.hollow.api.codegen.HollowCodeGenerationUtils;
import com.netflix.hollow.api.codegen.HollowErgonomicAPIShortcuts;
import com.netflix.hollow.api.codegen.HollowErgonomicAPIShortcuts.Shortcut;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.api.custom.HollowTypeAPI;
import com.netflix.hollow.api.objects.delegate.HollowCachedDelegate;
import com.netflix.hollow.api.objects.delegate.HollowObjectAbstractDelegate;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
/**
* This class contains template logic for generating a {@link HollowAPI} implementation. Not intended for external consumption.
*
* @see HollowAPIGenerator
*/
/**
 * Contains template logic for generating the "cached" delegate implementation of an
 * object type for a {@link HollowAPI}.  The generated delegate eagerly copies every
 * field of a record into final members at construction time, so subsequent accessor
 * calls are plain field reads rather than data-access lookups.  Not intended for
 * external consumption.
 *
 * @see HollowAPIGenerator
 */
public class HollowObjectDelegateCachedImplGenerator extends HollowObjectDelegateGenerator {
    public HollowObjectDelegateCachedImplGenerator(String packageName, HollowObjectSchema schema,
            HollowErgonomicAPIShortcuts ergonomicShortcuts, HollowDataset dataset, CodeGeneratorConfig config) {
        super(packageName, schema, ergonomicShortcuts, dataset, config);
        this.className = delegateCachedImplName(schema.getName());
    }
    /**
     * Builds the full java source of the cached delegate class: one cached member and
     * accessor per schema field, a constructor that eagerly populates the members,
     * and the {@code HollowCachedDelegate} plumbing.
     */
    @Override
    public String generate() {
        StringBuilder builder = new StringBuilder();
        appendPackageAndCommonImports(builder);
        builder.append("import ").append(HollowObjectAbstractDelegate.class.getName()).append(";\n");
        builder.append("import ").append(HollowObjectTypeDataAccess.class.getName()).append(";\n");
        builder.append("import ").append(HollowObjectSchema.class.getName()).append(";\n");
        builder.append("import ").append(HollowTypeAPI.class.getName()).append(";\n");
        builder.append("import ").append(HollowCachedDelegate.class.getName()).append(";\n");
        builder.append("\n@SuppressWarnings(\"all\")\n");
        builder.append("public class ").append(className).append(" extends HollowObjectAbstractDelegate implements HollowCachedDelegate, ").append(delegateInterfaceName(schema.getName())).append(" {\n\n");
        // Declare one cached member per schema field.  Primitive-valued fields use
        // boxed types so null can represent an absent value; a REFERENCE field gets
        // an ordinal member and, when an ergonomic shortcut exists, an additional
        // member caching the shortcut's terminal value.
        for(int i=0;i<schema.numFields();i++) {
            switch(schema.getFieldType(i)) {
            case BOOLEAN:
                builder.append("    private final Boolean ").append(substituteInvalidChars(schema.getFieldName(i))).append(";\n");
                break;
            case BYTES:
                builder.append("    private final byte[] ").append(substituteInvalidChars(schema.getFieldName(i))).append(";\n");
                break;
            case DOUBLE:
                builder.append("    private final Double ").append(substituteInvalidChars(schema.getFieldName(i))).append(";\n");
                break;
            case FLOAT:
                builder.append("    private final Float ").append(substituteInvalidChars(schema.getFieldName(i))).append(";\n");
                break;
            case INT:
                builder.append("    private final Integer ").append(substituteInvalidChars(schema.getFieldName(i))).append(";\n");
                break;
            case LONG:
                builder.append("    private final Long ").append(substituteInvalidChars(schema.getFieldName(i))).append(";\n");
                break;
            case REFERENCE:
                Shortcut shortcut = ergonomicShortcuts.getShortcut(schema.getName() + "." + schema.getFieldName(i));
                if(shortcut != null)
                    builder.append("    private final ").append(HollowCodeGenerationUtils.getJavaBoxedType(shortcut.getType())).append(" ").append(substituteInvalidChars(schema.getFieldName(i))).append(";\n");
                builder.append("    private final int ").append(substituteInvalidChars(schema.getFieldName(i))).append("Ordinal;\n");
                break;
            case STRING:
                builder.append("    private final String ").append(substituteInvalidChars(schema.getFieldName(i))).append(";\n");
                break;
            }
        }
        builder.append("    private ").append(typeAPIClassname(schema.getName())).append(" typeAPI;\n\n");
        builder.append("    public ").append(className).append("(").append(typeAPIClassname(schema.getName())).append(" typeAPI, int ordinal) {\n");
        // Generated constructor body: eagerly read each field's value (or ordinal)
        // from the type API and stash it in the corresponding member.
        for(int i=0;i<schema.numFields();i++) {
            String fieldName = substituteInvalidChars(schema.getFieldName(i));
            switch(schema.getFieldType(i)) {
            case STRING:
            case BYTES:
                builder.append("        this.").append(fieldName).append(" = typeAPI.get").append(uppercase(fieldName)).append("(ordinal);\n");
                break;
            case BOOLEAN:
            case DOUBLE:
            case FLOAT:
            case INT:
            case LONG:
                builder.append("        this.").append(fieldName).append(" = typeAPI.get").append(uppercase(fieldName)).append("Boxed(ordinal);\n");
                break;
            case REFERENCE:
                builder.append("        this.").append(fieldName).append("Ordinal = typeAPI.get").append(uppercase(fieldName)).append("Ordinal(ordinal);\n");
                Shortcut shortcut = ergonomicShortcuts.getShortcut(schema.getName() + "." + schema.getFieldName(i));
                // For an ergonomic shortcut, emit code which walks the reference path
                // at construction time (propagating -1 for missing references) and
                // caches the terminal value in the shortcut member.
                if(shortcut != null) {
                    String ordinalVariableName = fieldName + "TempOrdinal";
                    builder.append("        int ").append(ordinalVariableName).append(" = ").append(fieldName).append("Ordinal;\n");
                    for(int j=0;j<shortcut.getPath().length-1;j++) {
                        String typeAPIName = HollowCodeGenerationUtils.typeAPIClassname(shortcut.getPathTypes()[j]);
                        builder.append("        " + ordinalVariableName + " = " + ordinalVariableName + " == -1 ? -1 : typeAPI.getAPI().get").append(typeAPIName).append("().get").append(uppercase(shortcut.getPath()[j])).append("Ordinal(").append(ordinalVariableName).append(");\n");
                    }
                    String typeAPIName = HollowCodeGenerationUtils.typeAPIClassname(shortcut.getPathTypes()[shortcut.getPathTypes().length-1]);
                    builder.append("        this.").append(fieldName).append(" = ").append(ordinalVariableName).append(" == -1 ? null : ").append("typeAPI.getAPI().get").append(typeAPIName).append("().get").append(uppercase(shortcut.getPath()[shortcut.getPath().length-1])).append("(").append(ordinalVariableName).append(");\n");
                }
            }
        }
        builder.append("        this.typeAPI = typeAPI;\n");
        builder.append("    }\n\n");
        // Emit accessors.  REFERENCE fields expose their cached ordinal (plus the
        // shortcut value accessor when one exists); all other fields delegate to
        // addAccessor for the cached-value accessor pair.
        for(int i=0;i<schema.numFields();i++) {
            FieldType fieldType = schema.getFieldType(i);
            String fieldName = substituteInvalidChars(schema.getFieldName(i));
            if(schema.getFieldType(i) == FieldType.REFERENCE) {
                Shortcut shortcut = ergonomicShortcuts.getShortcut(schema.getName() + "." + schema.getFieldName(i));
                if(shortcut != null)
                    addAccessor(builder, shortcut.getType(), fieldName);
                builder.append("    public int get").append(uppercase(fieldName)).append("Ordinal(int ordinal) {\n");
                builder.append("        return ").append(fieldName).append("Ordinal;\n");
                builder.append("    }\n\n");
            } else {
                addAccessor(builder, fieldType, fieldName);
            }
        }
        builder.append("    @Override\n");
        builder.append("    public HollowObjectSchema getSchema() {\n");
        builder.append("        return typeAPI.getTypeDataAccess().getSchema();\n");
        builder.append("    }\n\n");
        builder.append("    @Override\n");
        builder.append("    public HollowObjectTypeDataAccess getTypeDataAccess() {\n");
        builder.append("        return typeAPI.getTypeDataAccess();\n");
        builder.append("    }\n\n");
        builder.append("    public ").append(typeAPIClassname(schema.getName())).append(" getTypeAPI() {\n");
        builder.append("        return typeAPI;\n");
        builder.append("    }\n\n");
        builder.append("    public void updateTypeAPI(HollowTypeAPI typeAPI) {\n");
        builder.append("        this.typeAPI = (").append(typeAPIClassname(schema.getName())).append(") typeAPI;\n");
        builder.append("    }\n\n");
        builder.append("}");
        return builder.toString();
    }
    /**
     * Appends the accessor (or accessor pair) for one cached field of the given
     * non-REFERENCE type.  Null cached members are translated to each type's
     * sentinel "absent" value (false / NaN / MIN_VALUE / null) in the unboxed
     * accessor, mirroring the lookup delegate's contract.
     */
    private void addAccessor(StringBuilder builder, FieldType fieldType, String fieldName) {
        switch(fieldType) {
        case BOOLEAN:
            builder.append("    public boolean get").append(uppercase(fieldName)).append("(int ordinal) {\n");
            builder.append("        if(").append(fieldName).append(" == null)\n");
            builder.append("            return false;\n");
            builder.append("        return ").append(fieldName).append(".booleanValue();\n");
            builder.append("    }\n\n");
            builder.append("    public Boolean get").append(uppercase(fieldName)).append("Boxed(int ordinal) {\n");
            builder.append("        return ").append(fieldName).append(";\n");
            builder.append("    }\n\n");
            break;
        case BYTES:
            builder.append("    public byte[] get").append(uppercase(fieldName)).append("(int ordinal) {\n");
            // we need the cast to get around http://findbugs.sourceforge.net/bugDescriptions.html#EI_EXPOSE_REP
            builder.append("        return (byte[]) ").append(fieldName).append(";\n");
            builder.append("    }\n\n");
            break;
        case DOUBLE:
            builder.append("    public double get").append(uppercase(fieldName)).append("(int ordinal) {\n");
            builder.append("        if(").append(fieldName).append(" == null)\n");
            builder.append("            return Double.NaN;\n");
            builder.append("        return ").append(fieldName).append(".doubleValue();\n");
            builder.append("    }\n\n");
            builder.append("    public Double get").append(uppercase(fieldName)).append("Boxed(int ordinal) {\n");
            builder.append("        return ").append(fieldName).append(";\n");
            builder.append("    }\n\n");
            break;
        case FLOAT:
            builder.append("    public float get").append(uppercase(fieldName)).append("(int ordinal) {\n");
            builder.append("        if(").append(fieldName).append(" == null)\n");
            builder.append("            return Float.NaN;\n");
            builder.append("        return ").append(fieldName).append(".floatValue();\n");
            builder.append("    }\n\n");
            builder.append("    public Float get").append(uppercase(fieldName)).append("Boxed(int ordinal) {\n");
            builder.append("        return ").append(fieldName).append(";\n");
            builder.append("    }\n\n");
            break;
        case INT:
            builder.append("    public int get").append(uppercase(fieldName)).append("(int ordinal) {\n");
            builder.append("        if(").append(fieldName).append(" == null)\n");
            builder.append("            return Integer.MIN_VALUE;\n");
            builder.append("        return ").append(fieldName).append(".intValue();\n");
            builder.append("    }\n\n");
            builder.append("    public Integer get").append(uppercase(fieldName)).append("Boxed(int ordinal) {\n");
            builder.append("        return ").append(fieldName).append(";\n");
            builder.append("    }\n\n");
            break;
        case LONG:
            builder.append("    public long get").append(uppercase(fieldName)).append("(int ordinal) {\n");
            builder.append("        if(").append(fieldName).append(" == null)\n");
            builder.append("            return Long.MIN_VALUE;\n");
            builder.append("        return ").append(fieldName).append(".longValue();\n");
            builder.append("    }\n\n");
            builder.append("    public Long get").append(uppercase(fieldName)).append("Boxed(int ordinal) {\n");
            builder.append("        return ").append(fieldName).append(";\n");
            builder.append("    }\n\n");
            break;
        case STRING:
            builder.append("    public String get").append(uppercase(fieldName)).append("(int ordinal) {\n");
            builder.append("        return ").append(fieldName).append(";\n");
            builder.append("    }\n\n");
            builder.append("    public boolean is").append(uppercase(fieldName)).append("Equal(int ordinal, String testValue) {\n");
            builder.append("        if(testValue == null)\n");
            builder.append("            return ").append(fieldName).append(" == null;\n");
            builder.append("        return testValue.equals(").append(fieldName).append(");\n");
            builder.append("    }\n\n");
            break;
        case REFERENCE:
            // REFERENCE fields are handled by the caller; reaching here is a bug.
            throw new IllegalArgumentException();
        }
    }
}
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen.api;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.delegateLookupClassname;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.substituteInvalidChars;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.typeAPIClassname;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.uppercase;
import com.netflix.hollow.api.codegen.CodeGeneratorConfig;
import com.netflix.hollow.api.codegen.HollowAPIGenerator;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.api.custom.HollowObjectTypeAPI;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.write.HollowObjectWriteRecord;
import java.util.Comparator;
import java.util.Set;
import java.util.TreeSet;
/**
* This class contains template logic for generating a {@link HollowAPI} implementation. Not intended for external consumption.
*
* @see HollowAPIGenerator
*
* @author dkoszewnik
*
*/
/**
 * This class contains template logic for generating the {@link HollowObjectTypeAPI}
 * subclass of a single object type in a generated {@link HollowAPI}.  Not intended
 * for external consumption.
 *
 * Fix: the boxed float accessor template previously omitted the newline after the
 * closing brace of the else-block (unlike the parallel double template), fusing
 * "}" and the return statement onto one generated line.
 *
 * @see HollowAPIGenerator
 *
 * @author dkoszewnik
 */
public class TypeAPIObjectJavaGenerator extends HollowTypeAPIGenerator {
    private final HollowObjectSchema objectSchema;
    // Classes the generated source must import; ordered by class name so the
    // emitted import block is deterministic across runs.
    private final Set<Class<?>> importClasses = new TreeSet<Class<?>>(new Comparator<Class<?>>() {
        @Override
        public int compare(Class<?> o1, Class<?> o2) {
            return o1.getName().compareTo(o2.getName());
        }
    });
    public TypeAPIObjectJavaGenerator(String apiClassname, String packageName, HollowObjectSchema schema,
            HollowDataset dataset, CodeGeneratorConfig config) {
        super(apiClassname, packageName, schema, dataset, config);
        this.objectSchema = schema;
        this.importClasses.add(HollowObjectTypeAPI.class);
        this.importClasses.add(HollowObjectTypeDataAccess.class);
    }
    /**
     * Builds the full java source of the type API class: a constructor wiring the
     * field names and delegate lookup, one accessor section per schema field, and
     * the delegate-lookup / API getters.
     */
    @Override
    public String generate() {
        // Generate the class body first: accessor generation may add entries to
        // importClasses, which must be flushed before the body is emitted.
        StringBuilder classBodyBuilder = new StringBuilder();
        classBodyBuilder.append("@SuppressWarnings(\"all\")\n");
        classBodyBuilder.append("public class " + className + " extends HollowObjectTypeAPI {\n\n");
        classBodyBuilder.append("    private final ").append(delegateLookupClassname(objectSchema)).append(" delegateLookupImpl;\n\n");
        classBodyBuilder.append(generateConstructor());
        classBodyBuilder.append("\n\n");
        for(int i=0;i<objectSchema.numFields();i++) {
            switch(objectSchema.getFieldType(i)) {
            case BOOLEAN:
                classBodyBuilder.append(generateBooleanFieldAccessor(i));
                break;
            case BYTES:
                classBodyBuilder.append(generateByteArrayFieldAccessor(i));
                break;
            case DOUBLE:
                classBodyBuilder.append(generateDoubleFieldAccessor(i));
                break;
            case FLOAT:
                classBodyBuilder.append(generateFloatFieldAccessor(i));
                break;
            case INT:
                classBodyBuilder.append(generateIntFieldAccessor(i));
                break;
            case LONG:
                classBodyBuilder.append(generateLongFieldAccessor(i));
                break;
            case REFERENCE:
                classBodyBuilder.append(generateReferenceFieldAccessors(i));
                break;
            case STRING:
                classBodyBuilder.append(generateStringFieldAccessors(i));
                break;
            }
            classBodyBuilder.append("\n\n");
        }
        classBodyBuilder.append("    public ").append(delegateLookupClassname(objectSchema)).append(" getDelegateLookupImpl() {\n");
        classBodyBuilder.append("        return delegateLookupImpl;\n");
        classBodyBuilder.append("    }\n\n");
        classBodyBuilder.append("    @Override\n");
        classBodyBuilder.append("    public ").append(apiClassname).append(" getAPI() {\n");
        classBodyBuilder.append("        return (").append(apiClassname).append(") api;\n");
        classBodyBuilder.append("    }\n\n");
        classBodyBuilder.append("}");
        StringBuilder classBuilder = new StringBuilder();
        appendPackageAndCommonImports(classBuilder, apiClassname);
        for(Class<?> clazz : importClasses) {
            classBuilder.append("import ").append(clazz.getName()).append(";\n");
        }
        classBuilder.append("\n");
        classBuilder.append(classBodyBuilder.toString());
        return classBuilder.toString();
    }
    /** Emits the constructor: super-call with the schema's field names, then delegate lookup wiring. */
    private String generateConstructor() {
        StringBuilder builder = new StringBuilder();
        builder.append("    public " + className + "(" + apiClassname + " api, HollowObjectTypeDataAccess typeDataAccess) {\n");
        builder.append("        super(api, typeDataAccess, new String[] {\n");
        for(int i=0;i<objectSchema.numFields();i++) {
            builder.append("            \"" + objectSchema.getFieldName(i) + "\"");
            if(i < objectSchema.numFields() - 1)
                builder.append(",");
            builder.append("\n");
        }
        builder.append("        });\n");
        builder.append("        this.delegateLookupImpl = new ").append(delegateLookupClassname(objectSchema)).append("(this);\n");
        builder.append("    }");
        return builder.toString();
    }
    /** Emits the accessor for a BYTES field. */
    private String generateByteArrayFieldAccessor(int fieldNum) {
        StringBuilder builder = new StringBuilder();
        String fieldName = substituteInvalidChars(objectSchema.getFieldName(fieldNum));
        builder.append("    public byte[] get" + uppercase(fieldName) + "(int ordinal) {\n");
        builder.append("        if(fieldIndex[" + fieldNum +"] == -1)\n");
        builder.append("            return missingDataHandler().handleBytes(\"").append(objectSchema.getName()).append("\", ordinal, \"").append(fieldName).append("\");\n");
        builder.append("        boxedFieldAccessSampler.recordFieldAccess(fieldIndex[" + fieldNum + "]);\n");
        builder.append("        return getTypeDataAccess().readBytes(ordinal, fieldIndex[" + fieldNum + "]);\n");
        builder.append("    }\n\n");
        return builder.toString();
    }
    /** Emits the value accessor and equality test for a STRING field. */
    private String generateStringFieldAccessors(int fieldNum) {
        StringBuilder builder = new StringBuilder();
        String fieldName = substituteInvalidChars(objectSchema.getFieldName(fieldNum));
        builder.append("    public String get" + uppercase(fieldName) + "(int ordinal) {\n");
        builder.append("        if(fieldIndex[" + fieldNum +"] == -1)\n");
        builder.append("            return missingDataHandler().handleString(\"").append(objectSchema.getName()).append("\", ordinal, \"").append(fieldName).append("\");\n");
        builder.append("        boxedFieldAccessSampler.recordFieldAccess(fieldIndex[" + fieldNum + "]);\n");
        builder.append("        return getTypeDataAccess().readString(ordinal, fieldIndex[" + fieldNum + "]);\n");
        builder.append("    }\n\n");
        builder.append("    public boolean is" + uppercase(fieldName) + "Equal(int ordinal, String testValue) {\n");
        builder.append("        if(fieldIndex[" + fieldNum +"] == -1)\n");
        builder.append("            return missingDataHandler().handleStringEquals(\"").append(objectSchema.getName()).append("\", ordinal, \"").append(fieldName).append("\", testValue);\n");
        builder.append("        return getTypeDataAccess().isStringFieldEqual(ordinal, fieldIndex[" + fieldNum + "], testValue);\n");
        builder.append("    }");
        return builder.toString();
    }
    /** Emits the ordinal accessor and referenced-type-API getter for a REFERENCE field. */
    private String generateReferenceFieldAccessors(int fieldNum) {
        StringBuilder builder = new StringBuilder();
        String fieldName = substituteInvalidChars(objectSchema.getFieldName(fieldNum));
        String referencedType = substituteInvalidChars(objectSchema.getReferencedType(fieldNum));
        builder.append("    public int get"+ uppercase(fieldName) + "Ordinal(int ordinal) {\n");
        builder.append("        if(fieldIndex[" + fieldNum +"] == -1)\n");
        builder.append("            return missingDataHandler().handleReferencedOrdinal(\"").append(objectSchema.getName()).append("\", ordinal, \"").append(fieldName).append("\");\n");
        builder.append("        return getTypeDataAccess().readOrdinal(ordinal, fieldIndex[" + fieldNum + "]);\n");
        builder.append("    }\n\n");
        builder.append("    public " + typeAPIClassname(referencedType) + " get" + uppercase(fieldName) + "TypeAPI() {\n");
        builder.append("        return getAPI().get").append(uppercase(referencedType)).append("TypeAPI();\n");
        builder.append("    }");
        return builder.toString();
    }
    /** Emits the primitive and boxed accessors for a DOUBLE field; NaN represents an absent value. */
    private String generateDoubleFieldAccessor(int fieldNum) {
        StringBuilder builder = new StringBuilder();
        String fieldName = substituteInvalidChars(objectSchema.getFieldName(fieldNum));
        builder.append("    public double get").append(uppercase(fieldName)).append("(int ordinal) {\n");
        builder.append("        if(fieldIndex[" + fieldNum +"] == -1)\n");
        builder.append("            return missingDataHandler().handleDouble(\"").append(objectSchema.getName()).append("\", ordinal, \"").append(fieldName).append("\");\n");
        builder.append("        return getTypeDataAccess().readDouble(ordinal, fieldIndex["+fieldNum+"]);\n");
        builder.append("    }\n\n");
        builder.append("    public Double get").append(uppercase(fieldName)).append("Boxed(int ordinal) {\n");
        builder.append("        double d;\n");
        builder.append("        if(fieldIndex[" + fieldNum +"] == -1) {\n");
        builder.append("            d = missingDataHandler().handleDouble(\"").append(objectSchema.getName()).append("\", ordinal, \"").append(fieldName).append("\");\n");
        builder.append("        } else {\n");
        builder.append("            boxedFieldAccessSampler.recordFieldAccess(fieldIndex[" + fieldNum + "]);\n");
        builder.append("            d = getTypeDataAccess().readDouble(ordinal, fieldIndex["+fieldNum+"]);\n");
        builder.append("        }\n");
        builder.append("        return Double.isNaN(d) ? null : Double.valueOf(d);\n");
        builder.append("    }\n\n");
        // NOTE(review): HollowObjectWriteRecord does not appear in the emitted
        // accessor text; import retained to preserve existing generated output.
        importClasses.add(HollowObjectWriteRecord.class);
        return builder.toString();
    }
    /** Emits the primitive and boxed accessors for a FLOAT field; NaN represents an absent value. */
    private String generateFloatFieldAccessor(int fieldNum) {
        StringBuilder builder = new StringBuilder();
        String fieldName = substituteInvalidChars(objectSchema.getFieldName(fieldNum));
        builder.append("    public float get").append(uppercase(fieldName)).append("(int ordinal) {\n");
        builder.append("        if(fieldIndex[" + fieldNum +"] == -1)\n");
        builder.append("            return missingDataHandler().handleFloat(\"").append(objectSchema.getName()).append("\", ordinal, \"").append(fieldName).append("\");\n");
        builder.append("        return getTypeDataAccess().readFloat(ordinal, fieldIndex["+fieldNum+"]);\n");
        builder.append("    }\n\n");
        builder.append("    public Float get").append(uppercase(fieldName)).append("Boxed(int ordinal) {\n");
        builder.append("        float f;\n");
        builder.append("        if(fieldIndex[" + fieldNum +"] == -1) {\n");
        builder.append("            f = missingDataHandler().handleFloat(\"").append(objectSchema.getName()).append("\", ordinal, \"").append(fieldName).append("\");\n");
        builder.append("        } else {\n");
        builder.append("            boxedFieldAccessSampler.recordFieldAccess(fieldIndex[" + fieldNum + "]);\n");
        builder.append("            f = getTypeDataAccess().readFloat(ordinal, fieldIndex["+fieldNum+"]);\n");
        // FIX: append "\n" after the closing brace (previously missing), matching
        // the double template so the generated "}" and "return" land on separate lines.
        builder.append("        }\n");
        builder.append("        return Float.isNaN(f) ? null : Float.valueOf(f);\n");
        builder.append("    }\n\n");
        // NOTE(review): HollowObjectWriteRecord does not appear in the emitted
        // accessor text; import retained to preserve existing generated output.
        importClasses.add(HollowObjectWriteRecord.class);
        return builder.toString();
    }
    /** Emits the primitive and boxed accessors for a LONG field; Long.MIN_VALUE represents an absent value. */
    private String generateLongFieldAccessor(int fieldNum) {
        StringBuilder builder = new StringBuilder();
        String fieldName = substituteInvalidChars(objectSchema.getFieldName(fieldNum));
        builder.append("    public long get").append(uppercase(fieldName)).append("(int ordinal) {\n");
        builder.append("        if(fieldIndex[" + fieldNum +"] == -1)\n");
        builder.append("            return missingDataHandler().handleLong(\"").append(objectSchema.getName()).append("\", ordinal, \"").append(fieldName).append("\");\n");
        builder.append("        return getTypeDataAccess().readLong(ordinal, fieldIndex[" + fieldNum + "]);\n");
        builder.append("    }\n\n");
        builder.append("    public Long get").append(uppercase(fieldName)).append("Boxed(int ordinal) {\n");
        builder.append("        long l;\n");
        builder.append("        if(fieldIndex[" + fieldNum +"] == -1) {\n");
        builder.append("            l = missingDataHandler().handleLong(\"").append(objectSchema.getName()).append("\", ordinal, \"").append(fieldName).append("\");\n");
        builder.append("        } else {\n");
        builder.append("            boxedFieldAccessSampler.recordFieldAccess(fieldIndex[" + fieldNum + "]);\n");
        builder.append("            l = getTypeDataAccess().readLong(ordinal, fieldIndex[" + fieldNum + "]);\n");
        builder.append("        }\n");
        builder.append("        if(l == Long.MIN_VALUE)\n");
        builder.append("            return null;\n");
        builder.append("        return Long.valueOf(l);\n");
        builder.append("    }\n\n");
        return builder.toString();
    }
    /** Emits the primitive and boxed accessors for an INT field; Integer.MIN_VALUE represents an absent value. */
    private String generateIntFieldAccessor(int fieldNum) {
        StringBuilder builder = new StringBuilder();
        String fieldName = substituteInvalidChars(objectSchema.getFieldName(fieldNum));
        builder.append("    public int get").append(uppercase(fieldName)).append("(int ordinal) {\n");
        builder.append("        if(fieldIndex[" + fieldNum +"] == -1)\n");
        builder.append("            return missingDataHandler().handleInt(\"").append(objectSchema.getName()).append("\", ordinal, \"").append(fieldName).append("\");\n");
        builder.append("        return getTypeDataAccess().readInt(ordinal, fieldIndex[" + fieldNum + "]);\n");
        builder.append("    }\n\n");
        builder.append("    public Integer get").append(uppercase(fieldName)).append("Boxed(int ordinal) {\n");
        builder.append("        int i;\n");
        builder.append("        if(fieldIndex[" + fieldNum +"] == -1) {\n");
        builder.append("            i = missingDataHandler().handleInt(\"").append(objectSchema.getName()).append("\", ordinal, \"").append(fieldName).append("\");\n");
        builder.append("        } else {\n");
        builder.append("            boxedFieldAccessSampler.recordFieldAccess(fieldIndex[" + fieldNum + "]);\n");
        builder.append("            i = getTypeDataAccess().readInt(ordinal, fieldIndex[" + fieldNum + "]);\n");
        builder.append("        }\n");
        builder.append("        if(i == Integer.MIN_VALUE)\n");
        builder.append("            return null;\n");
        builder.append("        return Integer.valueOf(i);\n");
        builder.append("    }\n\n");
        return builder.toString();
    }
    /** Emits the primitive and boxed accessors for a BOOLEAN field; null boxed value means absent. */
    private String generateBooleanFieldAccessor(int fieldNum) {
        StringBuilder builder = new StringBuilder();
        String fieldName = substituteInvalidChars(objectSchema.getFieldName(fieldNum));
        builder.append("    public boolean get").append(uppercase(fieldName)).append("(int ordinal) {\n");
        builder.append("        if(fieldIndex[" + fieldNum +"] == -1)\n");
        builder.append("            return Boolean.TRUE.equals(missingDataHandler().handleBoolean(\"").append(objectSchema.getName()).append("\", ordinal, \"").append(fieldName).append("\"));\n");
        builder.append("        return Boolean.TRUE.equals(getTypeDataAccess().readBoolean(ordinal, fieldIndex[" + fieldNum + "]));\n");
        builder.append("    }\n\n");
        builder.append("    public Boolean get").append(uppercase(fieldName)).append("Boxed(int ordinal) {\n");
        builder.append("        if(fieldIndex[" + fieldNum +"] == -1)\n");
        builder.append("            return missingDataHandler().handleBoolean(\"").append(objectSchema.getName()).append("\", ordinal, \"").append(fieldName).append("\");\n");
        builder.append("        return getTypeDataAccess().readBoolean(ordinal, fieldIndex[" + fieldNum + "]);\n");
        builder.append("    }\n\n");
        return builder.toString();
    }
}
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen.api;
import com.netflix.hollow.api.codegen.CodeGeneratorConfig;
import com.netflix.hollow.api.codegen.HollowAPIGenerator;
import com.netflix.hollow.api.codegen.HollowCodeGenerationUtils;
import com.netflix.hollow.api.codegen.HollowConsumerJavaFileGenerator;
import com.netflix.hollow.api.consumer.HollowConsumer;
import com.netflix.hollow.api.consumer.data.AbstractHollowDataAccessor;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.index.key.PrimaryKey;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowSchema;
import java.util.Arrays;
/**
* This class contains template logic for generating a {@link HollowAPI} implementation. Not intended for external consumption.
*
* @see HollowAPIGenerator
*/
public class HollowDataAccessorGenerator extends HollowConsumerJavaFileGenerator {
public static final String SUB_PACKAGE_NAME = "accessor";
protected final String apiclassName;
protected final String type;
protected final HollowObjectSchema schema;
public HollowDataAccessorGenerator(String packageName, String apiclassName, HollowObjectSchema schema,
HollowDataset dataset, CodeGeneratorConfig config) {
super(packageName, SUB_PACKAGE_NAME, dataset, config);
this.className = getClassName(schema);
this.apiclassName = apiclassName;
this.type = hollowImplClassname(schema.getName());
this.schema = schema;
}
protected String getClassName(HollowObjectSchema schema) {
return HollowCodeGenerationUtils.upperFirstChar(schema.getName()) + "DataAccessor";
}
@Override
public String generate() {
StringBuilder builder = new StringBuilder();
appendPackageAndCommonImports(builder, apiclassName, Arrays.<HollowSchema>asList(schema));
builder.append("import " + HollowConsumer.class.getName() + ";\n");
builder.append("import " + AbstractHollowDataAccessor.class.getName() + ";\n");
builder.append("import " + PrimaryKey.class.getName() + ";\n");
builder.append("import " + HollowReadStateEngine.class.getName() + ";\n");
builder.append("\n");
builder.append("@SuppressWarnings(\"all\")\n");
builder.append("public class " + className + " extends " + AbstractHollowDataAccessor.class.getSimpleName() + "<" + type +"> {\n\n");
builder.append(" public static final String TYPE = \"" + schema.getName() + "\";\n");
builder.append(" private " + apiclassName + " api;\n\n");
genConstructors(builder);
genPublicAPIs(builder);
builder.append("}");
return builder.toString();
}
protected void genConstructors(StringBuilder builder) {
builder.append(" public " + className + "(HollowConsumer consumer) {\n");
builder.append(" super(consumer, TYPE);\n");
builder.append(" this.api = (" + apiclassName + ")consumer.getAPI();\n");
builder.append(" }\n\n");
builder.append(" public " + className + "(HollowReadStateEngine rStateEngine, " + apiclassName + " api) {\n");
builder.append(" super(rStateEngine, TYPE);\n");
builder.append(" this.api = api;\n");
builder.append(" }\n\n");
builder.append(" public " + className + "(HollowReadStateEngine rStateEngine, " + apiclassName + " api, String ... fieldPaths) {\n");
builder.append(" super(rStateEngine, TYPE, fieldPaths);\n");
builder.append(" this.api = api;\n");
builder.append(" }\n\n");
builder.append(" public " + className + "(HollowReadStateEngine rStateEngine, " + apiclassName + " api, PrimaryKey primaryKey) {\n");
builder.append(" super(rStateEngine, TYPE, primaryKey);\n");
builder.append(" this.api = api;\n");
builder.append(" }\n\n");
}
protected void genPublicAPIs(StringBuilder out) {
    // getRecord(ordinal) resolves a record of this type through the generated API.
    out.append(" @Override public ").append(type).append(" getRecord(int ordinal){\n")
       .append(" return api.get").append(type).append("(ordinal);\n")
       .append(" }\n\n");
}
} | 9,324 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen/api/TypeAPISetJavaGenerator.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen.api;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.delegateLookupClassname;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.typeAPIClassname;
import com.netflix.hollow.api.codegen.CodeGeneratorConfig;
import com.netflix.hollow.api.codegen.HollowAPIGenerator;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.api.custom.HollowSetTypeAPI;
import com.netflix.hollow.api.objects.delegate.HollowSetLookupDelegate;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.read.dataaccess.HollowSetTypeDataAccess;
import com.netflix.hollow.core.schema.HollowSetSchema;
/**
* This class contains template logic for generating a {@link HollowAPI} implementation. Not intended for external consumption.
*
* @see HollowAPIGenerator
*
* @author dkoszewnik
*
*/
public class TypeAPISetJavaGenerator extends HollowTypeAPIGenerator {

    private final HollowSetSchema schema;

    public TypeAPISetJavaGenerator(String apiClassname, String packageName, HollowSetSchema schema,
            HollowDataset dataset, CodeGeneratorConfig config) {
        super(apiClassname, packageName, schema, dataset, config);
        this.schema = schema;
    }

    /**
     * Emits the type API source for a SET schema: a HollowSetTypeAPI subclass exposing the
     * element type API, the enclosing generated API, and the delegate lookup implementation.
     */
    @Override
    public String generate() {
        String delegateLookup = delegateLookupClassname(schema);
        String elementAPI = typeAPIClassname(schema.getElementType());

        StringBuilder out = new StringBuilder();
        appendPackageAndCommonImports(out, apiClassname);

        out.append("import ").append(HollowSetTypeAPI.class.getName()).append(";\n\n");
        out.append("import ").append(HollowSetTypeDataAccess.class.getName()).append(";\n");
        out.append("import ").append(HollowSetLookupDelegate.class.getName()).append(";\n");

        out.append("\n@SuppressWarnings(\"all\")\n");
        out.append("public class ").append(className).append(" extends HollowSetTypeAPI {\n\n");
        out.append(" private final ").append(delegateLookup).append(" delegateLookupImpl;\n\n");

        out.append(" public ").append(className).append("(").append(apiClassname).append(" api, HollowSetTypeDataAccess dataAccess) {\n");
        out.append(" super(api, dataAccess);\n");
        out.append(" this.delegateLookupImpl = new ").append(delegateLookup).append("(this);\n");
        out.append(" }\n\n");

        out.append(" public ").append(elementAPI).append(" getElementAPI() {\n");
        out.append(" return getAPI().get").append(elementAPI).append("();\n");
        out.append(" }\n\n");

        out.append(" public ").append(apiClassname).append(" getAPI() {\n");
        out.append(" return (").append(apiClassname).append(")api;\n");
        out.append(" }\n\n");

        out.append(" public ").append(delegateLookup).append(" getDelegateLookupImpl() {\n");
        out.append(" return delegateLookupImpl;\n");
        out.append(" }\n\n");

        out.append("}");
        return out.toString();
    }
}
| 9,325 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen/api/HollowTypeAPIGenerator.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.hollow.api.codegen.api;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.typeAPIClassname;
import com.netflix.hollow.api.codegen.CodeGeneratorConfig;
import com.netflix.hollow.api.codegen.HollowConsumerJavaFileGenerator;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.schema.HollowSchema;
public abstract class HollowTypeAPIGenerator extends HollowConsumerJavaFileGenerator {
public static final String SUB_PACKAGE_NAME = "core";
protected final String apiClassname;
public HollowTypeAPIGenerator(String stateEngineClassname, String packageName, HollowSchema schema,
HollowDataset dataset, CodeGeneratorConfig config) {
super(packageName, SUB_PACKAGE_NAME, dataset, config);
this.apiClassname = stateEngineClassname;
this.className = typeAPIClassname(schema.getName());
}
} | 9,326 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen/api/TypeAPIListJavaGenerator.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen.api;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.delegateLookupClassname;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.typeAPIClassname;
import com.netflix.hollow.api.codegen.CodeGeneratorConfig;
import com.netflix.hollow.api.codegen.HollowAPIGenerator;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.api.custom.HollowListTypeAPI;
import com.netflix.hollow.api.objects.delegate.HollowListLookupDelegate;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.read.dataaccess.HollowListTypeDataAccess;
import com.netflix.hollow.core.schema.HollowListSchema;
/**
* This class contains template logic for generating a {@link HollowAPI} implementation. Not intended for external consumption.
*
* @see HollowAPIGenerator
*
* @author dkoszewnik
*
*/
public class TypeAPIListJavaGenerator extends HollowTypeAPIGenerator {
private final HollowListSchema schema;
public TypeAPIListJavaGenerator(String apiClassname, String packageName, HollowListSchema schema,
HollowDataset dataset, CodeGeneratorConfig config) {
super(apiClassname, packageName, schema, dataset, config);
this.schema = schema;
}
@Override
public String generate() {
StringBuilder builder = new StringBuilder();
appendPackageAndCommonImports(builder, apiClassname);
builder.append("import " + HollowListTypeAPI.class.getName() + ";\n\n");
builder.append("import " + HollowListTypeDataAccess.class.getName() + ";\n");
builder.append("import " + HollowListLookupDelegate.class.getName() + ";\n");
builder.append("\n@SuppressWarnings(\"all\")\n");
builder.append("public class ").append(className).append(" extends HollowListTypeAPI {\n\n");
builder.append(" private final ").append(delegateLookupClassname(schema)).append(" delegateLookupImpl;\n\n");
builder.append(" public ").append(className).append("(").append(apiClassname).append(" api, HollowListTypeDataAccess dataAccess) {\n");
builder.append(" super(api, dataAccess);\n");
builder.append(" this.delegateLookupImpl = new ").append(delegateLookupClassname(schema)).append("(this);\n");
builder.append(" }\n\n");
builder.append(" public ").append(typeAPIClassname(schema.getElementType())).append(" getElementAPI() {\n");
builder.append(" return getAPI().get").append(typeAPIClassname(schema.getElementType())).append("();\n");
builder.append(" }\n\n");
builder.append(" public ").append(delegateLookupClassname(schema)).append(" getDelegateLookupImpl() {\n");
builder.append(" return delegateLookupImpl;\n");
builder.append(" }\n\n");
builder.append(" public ").append(apiClassname).append(" getAPI() {\n");
builder.append(" return (").append(apiClassname).append(")api;\n");
builder.append(" }\n\n");
builder.append("}");
return builder.toString();
}
} | 9,327 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen/api/TypeAPIMapJavaGenerator.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen.api;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.delegateLookupClassname;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.typeAPIClassname;
import com.netflix.hollow.api.codegen.CodeGeneratorConfig;
import com.netflix.hollow.api.codegen.HollowAPIGenerator;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.api.custom.HollowMapTypeAPI;
import com.netflix.hollow.api.objects.delegate.HollowMapLookupDelegate;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.read.dataaccess.HollowMapTypeDataAccess;
import com.netflix.hollow.core.schema.HollowMapSchema;
/**
* This class contains template logic for generating a {@link HollowAPI} implementation. Not intended for external consumption.
*
* @see HollowAPIGenerator
*
* @author dkoszewnik
*
*/
public class TypeAPIMapJavaGenerator extends HollowTypeAPIGenerator {

    private final HollowMapSchema schema;

    public TypeAPIMapJavaGenerator(String apiClassname, String packageName, HollowMapSchema schema,
            HollowDataset dataset, CodeGeneratorConfig config) {
        super(apiClassname, packageName, schema, dataset, config);
        this.schema = schema;
    }

    /**
     * Emits the type API source for a MAP schema: a HollowMapTypeAPI subclass exposing the key
     * and value type APIs, the delegate lookup implementation, and the enclosing generated API.
     */
    @Override
    public String generate() {
        String delegateLookup = delegateLookupClassname(schema);
        String keyAPI = typeAPIClassname(schema.getKeyType());
        String valueAPI = typeAPIClassname(schema.getValueType());

        StringBuilder out = new StringBuilder();
        appendPackageAndCommonImports(out, apiClassname);

        out.append("import ").append(HollowMapTypeAPI.class.getName()).append(";\n\n");
        out.append("import ").append(HollowMapTypeDataAccess.class.getName()).append(";\n");
        out.append("import ").append(HollowMapLookupDelegate.class.getName()).append(";\n");

        out.append("\n@SuppressWarnings(\"all\")\n");
        out.append("public class ").append(className).append(" extends HollowMapTypeAPI {\n\n");
        out.append(" private final ").append(delegateLookup).append(" delegateLookupImpl;\n\n");

        out.append(" public ").append(className).append("(").append(apiClassname).append(" api, HollowMapTypeDataAccess dataAccess) {\n");
        out.append(" super(api, dataAccess);\n");
        out.append(" this.delegateLookupImpl = new ").append(delegateLookup).append("(this);\n");
        out.append(" }\n\n");

        out.append(" public ").append(keyAPI).append(" getKeyAPI() {\n");
        out.append(" return getAPI().get").append(keyAPI).append("();\n");
        out.append(" }\n\n");

        out.append(" public ").append(valueAPI).append(" getValueAPI() {\n");
        out.append(" return getAPI().get").append(valueAPI).append("();\n");
        out.append(" }\n\n");

        out.append(" public ").append(delegateLookup).append(" getDelegateLookupImpl() {\n");
        out.append(" return delegateLookupImpl;\n");
        out.append(" }\n\n");

        out.append(" public ").append(apiClassname).append(" getAPI() {\n");
        out.append(" return (").append(apiClassname).append(")api;\n");
        out.append(" }\n\n");

        out.append("}");
        return out.toString();
    }
}
| 9,328 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen/perfapi/HollowObjectTypePerfAPIClassGenerator.java | /*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen.perfapi;
import com.netflix.hollow.api.codegen.HollowCodeGenerationUtils;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import java.util.Set;
class HollowObjectTypePerfAPIClassGenerator {

    private final HollowObjectSchema schema;
    private final String packageName;
    // Fully-qualified "Type.field" names for which a fieldExists() guard method is generated.
    private final Set<String> checkFieldExistsMethods;

    public HollowObjectTypePerfAPIClassGenerator(HollowObjectSchema schema, String packageName, Set<String> checkFieldExistsMethods) {
        this.schema = schema;
        this.packageName = packageName;
        this.checkFieldExistsMethods = checkFieldExistsMethods;
    }

    /**
     * Generates the source of a HollowObjectTypePerfAPI subclass for this object schema:
     * a fieldNames constant, a constructor, and typed accessor methods for every field.
     */
    public String generate() {
        StringBuilder builder = new StringBuilder();
        builder.append("package " + packageName + ";\n\n");
        builder.append("import com.netflix.hollow.api.perfapi.HollowObjectTypePerfAPI;\n" +
                "import com.netflix.hollow.api.perfapi.HollowPerformanceAPI;\n" +
                "import com.netflix.hollow.api.perfapi.Ref;\n" +
                "import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;\n\n");
        builder.append("@SuppressWarnings(\"all\")\n");
        builder.append("public class " + schema.getName() + "PerfAPI extends HollowObjectTypePerfAPI {\n\n");
        builder.append(" public static final String fieldNames[] = { ");
        for(int i=0;i<schema.numFields();i++) {
            if(i > 0)
                builder.append(", ");
            builder.append("\"" + schema.getFieldName(i) + "\"");
        }
        builder.append(" };\n\n");
        builder.append(" public " + schema.getName() + "PerfAPI(HollowDataAccess dataAccess, String typeName, HollowPerformanceAPI api) {\n");
        builder.append(" super(dataAccess, typeName, api, fieldNames);\n");
        builder.append(" }\n\n");
        for(int i=0;i<schema.numFields();i++) {
            FieldType fieldType = schema.getFieldType(i);
            String fieldName = schema.getFieldName(i);
            String referencedType = schema.getReferencedType(i);
            appendFieldMethod(builder, fieldType, fieldName, i, referencedType);
        }
        builder.append("}");
        return builder.toString();
    }

    /**
     * Appends the accessor method(s) for one field: a primitive getter plus a boxed/null-aware
     * variant where applicable, and an optional fieldExists() guard.
     *
     * @param builder output being accumulated
     * @param fieldType hollow field type driving which accessor shape is emitted
     * @param fieldName field name within the schema
     * @param fieldIdx field position; indexes the generated fieldIdx[] array
     * @param referencedType referenced type name (REFERENCE fields only; otherwise null)
     */
    public void appendFieldMethod(StringBuilder builder, FieldType fieldType, String fieldName, int fieldIdx, String referencedType) {
        // Hoisted: previously recomputed up to four times per field.
        String accessor = HollowCodeGenerationUtils.upperFirstChar(fieldName);
        String type = fieldType.name();
        if(fieldType == FieldType.REFERENCE)
            type += " (" + referencedType + ")";
        builder.append(" /**\n" +
                " * <i>"+schema.getName() + "." + fieldName +"</i><br/>\n" +
                " * <b>" + type + "</b>\n" +
                " */\n");
        switch(fieldType) {
        case INT:
            builder.append(" public int get" + accessor + "(long ref) {\n");
            builder.append(" return typeAccess.readInt(ordinal(ref), fieldIdx[" + fieldIdx + "]);\n");
            builder.append(" }\n\n");
            // Boxed variant maps the sentinel Integer.MIN_VALUE to null.
            builder.append(" public Integer get" + accessor + "Boxed(long ref) {\n");
            builder.append(" int val = typeAccess.readInt(ordinal(ref), fieldIdx[" + fieldIdx + "]);\n");
            builder.append(" if(val == Integer.MIN_VALUE)\n");
            builder.append(" return null;\n");
            builder.append(" return val;\n");
            builder.append(" }\n\n");
            break;
        case LONG:
            builder.append(" public long get" + accessor + "(long ref) {\n");
            builder.append(" return typeAccess.readLong(ordinal(ref), fieldIdx[" + fieldIdx + "]);\n");
            builder.append(" }\n\n");
            // Boxed variant maps the sentinel Long.MIN_VALUE to null.
            builder.append(" public Long get" + accessor + "Boxed(long ref) {\n");
            builder.append(" long val = typeAccess.readLong(ordinal(ref), fieldIdx[" + fieldIdx + "]);\n");
            builder.append(" if(val == Long.MIN_VALUE)\n");
            builder.append(" return null;\n");
            builder.append(" return val;\n");
            builder.append(" }\n\n");
            break;
        case FLOAT:
            builder.append(" public float get" + accessor + "(long ref) {\n");
            builder.append(" return typeAccess.readFloat(ordinal(ref), fieldIdx[" + fieldIdx + "]);\n");
            builder.append(" }\n\n");
            // Boxed variant maps NaN (the null sentinel) to null.
            builder.append(" public Float get" + accessor + "Boxed(long ref) {\n");
            builder.append(" float val = typeAccess.readFloat(ordinal(ref), fieldIdx[" + fieldIdx + "]);\n");
            builder.append(" if(Float.isNaN(val))\n");
            builder.append(" return null;\n");
            builder.append(" return val;\n");
            builder.append(" }\n\n");
            break;
        case DOUBLE:
            builder.append(" public double get" + accessor + "(long ref) {\n");
            builder.append(" return typeAccess.readDouble(ordinal(ref), fieldIdx[" + fieldIdx + "]);\n");
            builder.append(" }\n\n");
            // Boxed variant maps NaN (the null sentinel) to null.
            builder.append(" public Double get" + accessor + "Boxed(long ref) {\n");
            builder.append(" double val = typeAccess.readDouble(ordinal(ref), fieldIdx[" + fieldIdx + "]);\n");
            builder.append(" if(Double.isNaN(val))\n");
            builder.append(" return null;\n");
            builder.append(" return val;\n");
            builder.append(" }\n\n");
            break;
        case BOOLEAN:
            builder.append(" public boolean get" + accessor + "(long ref) {\n");
            builder.append(" return Boolean.TRUE.equals(typeAccess.readBoolean(ordinal(ref), fieldIdx[" + fieldIdx + "]));\n");
            builder.append(" }\n\n");
            builder.append(" public Boolean get" + accessor + "Boxed(long ref) {\n");
            builder.append(" return typeAccess.readBoolean(ordinal(ref), fieldIdx[" + fieldIdx + "]);\n");
            builder.append(" }\n\n");
            break;
        case STRING:
            builder.append(" public String get" + accessor + "(long ref) {\n");
            builder.append(" return typeAccess.readString(ordinal(ref), fieldIdx[" + fieldIdx + "]);\n");
            builder.append(" }\n\n");
            builder.append(" public boolean is" + accessor + "Equal(long ref, String testValue) {\n");
            builder.append(" return typeAccess.isStringFieldEqual(ordinal(ref), fieldIdx[" + fieldIdx + "], testValue);\n");
            builder.append(" }\n\n");
            break;
        case BYTES:
            builder.append(" public byte[] get" + accessor + "(long ref) {\n");
            builder.append(" return typeAccess.readBytes(ordinal(ref), fieldIdx[" + fieldIdx + "]);\n");
            builder.append(" }\n\n");
            break;
        case REFERENCE:
            builder.append(" public long get" + accessor + "Ref(long ref) {\n");
            builder.append(" return Ref.toRefWithTypeMasked(refMaskedTypeIdx[" + fieldIdx + "], typeAccess.readOrdinal(ordinal(ref), fieldIdx[" + fieldIdx + "]));\n");
            builder.append(" }\n\n");
            break;
        }
        if(checkFieldExistsMethods.contains(schema.getName() + "." + fieldName)) {
            builder.append(" public boolean " + fieldName + "FieldExists() {\n");
            builder.append(" return fieldIdx[" + fieldIdx + "] != -1;\n");
            builder.append(" }\n\n");
        }
    }
}
| 9,329 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen/perfapi/HollowPerformanceAPIGenerator.java | /*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen.perfapi;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.schema.HollowSchema.SchemaType;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
public class HollowPerformanceAPIGenerator {
private HollowDataset dataset;
private String apiClassname;
private String packageName;
private Path destinationPath;
private Set<String> checkFieldExistsMethods = new HashSet<>();
public static Builder newBuilder() {
HollowPerformanceAPIGenerator gen = new HollowPerformanceAPIGenerator();
return gen.theBuilder();
}
private Builder theBuilder() {
return new Builder();
}
public class Builder {
public Builder withDataset(HollowDataset dataset) {
HollowPerformanceAPIGenerator.this.dataset = dataset;
return this;
}
public Builder withAPIClassname(String apiClassname) {
HollowPerformanceAPIGenerator.this.apiClassname = apiClassname;
return this;
}
public Builder withPackageName(String packageName) {
HollowPerformanceAPIGenerator.this.packageName = packageName;
return this;
}
public Builder withDestination(String destinationPath) {
return withDestination(Paths.get(destinationPath));
}
public Builder withDestination(Path destinationPath) {
HollowPerformanceAPIGenerator.this.destinationPath = destinationPath;
return this;
}
public Builder withCheckFieldExistsMethods(Set<String> checkFieldExistsMethods) {
HollowPerformanceAPIGenerator.this.checkFieldExistsMethods.addAll(checkFieldExistsMethods);
return this;
}
public Builder withCheckFieldExistsMethods(String... checkFieldExistsMethods) {
HollowPerformanceAPIGenerator.this.checkFieldExistsMethods.addAll(Arrays.asList(checkFieldExistsMethods));
return this;
}
public HollowPerformanceAPIGenerator build() {
return HollowPerformanceAPIGenerator.this;
}
}
public void generateSourceFiles() throws IOException {
generate(dataset, packageName, apiClassname, destinationPath, checkFieldExistsMethods);
}
private void generate(HollowDataset dataset, String packageName, String apiClassName, Path destination, Set<String> checkFieldExistsMethods) throws IOException {
Path packagePath = Paths.get(packageName.replace(".", File.separator));
if (!destination.toAbsolutePath().endsWith(packagePath)) {
destination = destination.resolve(packagePath);
}
Path apiClassDestination = destination.resolve(apiClassName + ".java");
if (!Files.exists(apiClassDestination)) {
Files.createDirectories(destination);
}
String apiClassContent = new HollowPerformanceAPIClassGenerator(dataset, apiClassName, packageName).generate();
try (FileWriter writer = new FileWriter(apiClassDestination.toFile())) {
writer.write(apiClassContent);
}
for (HollowSchema schema : dataset.getSchemas()) {
if (schema.getSchemaType() == SchemaType.OBJECT) {
Path objClassDestination = destination.resolve(schema.getName() + "PerfAPI.java");
String objClassContent = new HollowObjectTypePerfAPIClassGenerator((HollowObjectSchema) schema, packageName, checkFieldExistsMethods).generate();
try (FileWriter writer = new FileWriter(objClassDestination.toFile())) {
writer.write(objClassContent);
}
}
}
}
} | 9,330 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen/perfapi/HollowPerformanceAPIClassGenerator.java | /*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen.perfapi;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.schema.HollowSchema;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
class HollowPerformanceAPIClassGenerator {
private final HollowDataset dataset;
private final String apiClassName;
private final String packageName;
public HollowPerformanceAPIClassGenerator(HollowDataset dataset, String apiClassName, String packageName) {
this.dataset = dataset;
this.apiClassName = apiClassName;
this.packageName = packageName;
}
public String generate() {
StringBuilder builder = new StringBuilder();
builder.append("package " + packageName + ";\n\n");
builder.append("import com.netflix.hollow.api.perfapi.HollowListTypePerfAPI;\n" +
"import com.netflix.hollow.api.perfapi.HollowMapTypePerfAPI;\n" +
"import com.netflix.hollow.api.perfapi.HollowPerformanceAPI;\n" +
"import com.netflix.hollow.api.perfapi.HollowSetTypePerfAPI;\n" +
"import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;\n" +
"import com.netflix.hollow.core.read.dataaccess.HollowListTypeDataAccess;\n" +
"import com.netflix.hollow.core.read.dataaccess.HollowMapTypeDataAccess;\n" +
"import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess;\n" +
"import com.netflix.hollow.core.read.dataaccess.HollowSetTypeDataAccess;\n" +
"import com.netflix.hollow.core.read.dataaccess.HollowTypeDataAccess;\n" +
"import java.util.Set;\n" +
"\n");
builder.append("@SuppressWarnings(\"all\")\n");
builder.append("public class " + apiClassName + " extends HollowPerformanceAPI {\n\n");
List<HollowSchema> schemas = new ArrayList<>(dataset.getSchemas());
schemas.sort(Comparator.comparing(HollowSchema::getName));
for(HollowSchema schema : schemas) {
String schemaName = schema.getName();
switch(schema.getSchemaType()) {
case OBJECT:
builder.append(" public final " + schemaName + "PerfAPI " + schemaName + ";\n");
break;
case LIST:
builder.append(" public final HollowListTypePerfAPI " + schemaName + ";\n");
break;
case SET:
builder.append(" public final HollowSetTypePerfAPI " + schemaName + ";\n");
break;
case MAP:
builder.append(" public final HollowMapTypePerfAPI " + schemaName + ";\n");
break;
}
}
builder.append("\n");
builder.append(" public " + apiClassName + "(HollowDataAccess dataAccess) {\n");
builder.append(" super(dataAccess);\n\n");
for(HollowSchema schema : schemas) {
String schemaName = schema.getName();
switch (schema.getSchemaType()) {
case OBJECT:
builder.append(" this." + schemaName + " = new " + schemaName + "PerfAPI(dataAccess, \"" + schemaName + "\", this);\n");
break;
case LIST:
builder.append(" this." + schemaName + " = new HollowListTypePerfAPI(dataAccess, \"" + schemaName + "\", this);\n");
break;
case MAP:
builder.append(" this." + schemaName + " = new HollowMapTypePerfAPI(dataAccess, \"" + schemaName + "\", this);\n");
break;
case SET:
builder.append(" this." + schemaName + " = new HollowSetTypePerfAPI(dataAccess, \"" + schemaName + "\", this);\n");
break;
}
}
builder.append(" }\n");
builder.append("}");
return builder.toString();
}
} | 9,331 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen/indexes/HollowHashIndexGenerator.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen.indexes;
import static com.netflix.hollow.api.codegen.HollowCodeGenerationUtils.substituteInvalidChars;
import com.netflix.hollow.api.codegen.CodeGeneratorConfig;
import com.netflix.hollow.api.codegen.HollowAPIGenerator;
import com.netflix.hollow.api.consumer.HollowConsumer;
import com.netflix.hollow.api.consumer.data.AbstractHollowOrdinalIterable;
import com.netflix.hollow.api.consumer.index.AbstractHollowHashIndex;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.index.HollowHashIndexResult;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.schema.HollowSchemaSorter;
import java.util.Collections;
import java.util.List;
/**
* This class contains template logic for generating a {@link HollowAPI} implementation. Not intended for external consumption.
*
* @see HollowAPIGenerator
*
*/
public class HollowHashIndexGenerator extends HollowIndexGenerator {
private final HollowDataset dataset;
private final boolean isListenToDataRefreah;
public HollowHashIndexGenerator(String packageName, String apiClassname, HollowDataset dataset, CodeGeneratorConfig config) {
super(packageName, apiClassname, dataset, config);
this.className = apiClassname + "HashIndex";
this.dataset = dataset;
this.isListenToDataRefreah = config.isListenToDataRefresh();
}
@Override
public String generate() {
List<HollowSchema> schemaList = HollowSchemaSorter.dependencyOrderedSchemaList(dataset);
StringBuilder builder = new StringBuilder();
appendPackageAndCommonImports(builder, apiClassname, schemaList);
builder.append("import " + HollowConsumer.class.getName() + ";\n");
builder.append("import " + HollowHashIndexResult.class.getName() + ";\n");
builder.append("import " + Collections.class.getName() + ";\n");
builder.append("import " + Iterable.class.getName() + ";\n");
builder.append("import " + AbstractHollowHashIndex.class.getName() + ";\n");
builder.append("import " + AbstractHollowOrdinalIterable.class.getName() + ";\n\n");
builder.append("\n");
builder.append("/**\n");
genDeprecatedJavaDoc(schemaList, builder);
builder.append(" */\n");
builder.append("@Deprecated\n");
builder.append("@SuppressWarnings(\"all\")\n");
builder.append("public class " + className + " extends " + AbstractHollowHashIndex.class.getSimpleName() + "<" + apiClassname + "> {\n\n");
builder.append(" public " + className + "(HollowConsumer consumer, String queryType, String selectFieldPath, String... matchFieldPaths) {\n");
builder.append(" super(consumer, " + isListenToDataRefreah +", queryType, selectFieldPath, matchFieldPaths);\n");
builder.append(" }\n\n");
builder.append(" public " + className + "(HollowConsumer consumer, boolean isListenToDataRefresh, String queryType, String selectFieldPath, String... matchFieldPaths) {\n");
builder.append(" super(consumer, isListenToDataRefresh, queryType, selectFieldPath, matchFieldPaths);\n");
builder.append(" }\n\n");
for(HollowSchema schema : schemaList) {
builder.append(" public Iterable<" + hollowImplClassname(schema.getName()) + "> find" + substituteInvalidChars(schema.getName()) + "Matches(Object... keys) {\n");
builder.append(" HollowHashIndexResult matches = idx.findMatches(keys);\n");
builder.append(" if(matches == null) return Collections.emptySet();\n\n");
builder.append(" return new AbstractHollowOrdinalIterable<" + hollowImplClassname(schema.getName()) + ">(matches.iterator()) {\n");
builder.append(" public " + hollowImplClassname(schema.getName()) + " getData(int ordinal) {\n");
builder.append(" return api.get" + hollowImplClassname(schema.getName()) + "(ordinal);\n");
builder.append(" }\n");
builder.append(" };\n");
builder.append(" }\n\n");
}
builder.append("}");
return builder.toString();
}
/**
 * Appends the body of the generated class's {@code @deprecated} javadoc, pointing users
 * at the replacement {@code HashIndex} API with a usage example for the first indexed type.
 */
private void genDeprecatedJavaDoc(List<HollowSchema> schemaList, StringBuilder builder) {
    // No indexed types means nothing to document.
    if (schemaList.isEmpty()) return;
    // The example snippet references the first indexed type only.
    String typeName = hollowImplClassname(schemaList.get(0).getName());
    builder.append(" * @deprecated see {@link com.netflix.hollow.api.consumer.index.HashIndex} which can be built as follows:\n")
           .append(" * <pre>{@code\n")
           .append(" * HashIndex<").append(typeName).append(", K> uki = HashIndex.from(consumer, ").append(typeName).append(".class)\n")
           .append(" * .usingBean(k);\n")
           .append(" * Stream<").append(typeName).append("> results = uki.findMatches(k);\n")
           .append(" * }</pre>\n")
           .append(" * where {@code K} is a class declaring key field paths members, annotated with\n")
           .append(" * {@link com.netflix.hollow.api.consumer.index.FieldPath}, and {@code k} is an instance of\n")
           .append(" * {@code K} that is the query to find the matching {@code ").append(typeName).append("} objects.\n");
}
}
| 9,332 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen/indexes/HollowIndexGenerator.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.hollow.api.codegen.indexes;
import com.netflix.hollow.api.codegen.CodeGeneratorConfig;
import com.netflix.hollow.api.codegen.HollowConsumerJavaFileGenerator;
import com.netflix.hollow.core.HollowDataset;
/**
 * Base class for generators that emit index helper classes; generated sources are
 * placed under the {@code "index"} sub-package.
 */
public abstract class HollowIndexGenerator extends HollowConsumerJavaFileGenerator {
    public static final String SUB_PACKAGE_NAME = "index";

    // Simple class name of the generated consumer API; referenced by generated index classes.
    protected final String apiClassname;

    /**
     * @param packageName  base package for generated classes
     * @param apiClassname simple class name of the generated consumer API
     * @param dataset      the data model being generated against
     * @param config       code generation options
     */
    public HollowIndexGenerator(String packageName, String apiClassname, HollowDataset dataset,
            CodeGeneratorConfig config) {
        super(packageName, SUB_PACKAGE_NAME, dataset, config);
        this.apiClassname = apiClassname;
    }
}
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen/indexes/LegacyHollowPrimaryKeyIndexGenerator.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen.indexes;
import com.netflix.hollow.api.codegen.CodeGeneratorConfig;
import com.netflix.hollow.api.codegen.HollowAPIGenerator;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.schema.HollowObjectSchema;
/**
* This class contains template logic for generating a {@link HollowAPI} implementation. Not intended for external consumption.
*
* @see HollowAPIGenerator
*
*/
public class LegacyHollowPrimaryKeyIndexGenerator extends HollowUniqueKeyIndexGenerator {
public LegacyHollowPrimaryKeyIndexGenerator(String packageName, String apiClassname, HollowObjectSchema schema,
HollowDataset dataset, CodeGeneratorConfig config) {
super(packageName, apiClassname, schema, dataset, config);
isGenSimpleConstructor = true;
isParameterizedConstructorPublic = true;
isAutoListenToDataRefresh = true;
}
@Override
protected String getClassName(HollowObjectSchema schema) {
return schema.getName() + "PrimaryKeyIndex";
}
} | 9,334 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen/indexes/HollowPrimaryKeyIndexGenerator.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen.indexes;
import com.netflix.hollow.api.codegen.CodeGeneratorConfig;
import com.netflix.hollow.api.codegen.HollowAPIGenerator;
import com.netflix.hollow.api.codegen.HollowCodeGenerationUtils;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.index.key.PrimaryKey;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import java.util.ArrayList;
import java.util.List;
/**
* This class contains template logic for generating a {@link HollowAPI} implementation. Not intended for external consumption.
*
* @see HollowAPIGenerator
*/
public class HollowPrimaryKeyIndexGenerator extends HollowUniqueKeyIndexGenerator {
protected final PrimaryKey pk;
public HollowPrimaryKeyIndexGenerator(HollowDataset dataset, String packageName, String apiClassname, HollowObjectSchema schema, CodeGeneratorConfig config) {
super(packageName, apiClassname, schema, dataset, config);
this.pk = schema.getPrimaryKey();
isGenSimpleConstructor = true;
isParameterizedConstructorPublic = false;
isAutoListenToDataRefresh = false;
isImplementsUniqueKeyIndex = false;
}
@Override
protected String getClassName(HollowObjectSchema schema) {
return schema.getName() + "PrimaryKeyIndex";
}
@Override
protected void genFindMatchAPI(StringBuilder builder) {
List<String> params = new ArrayList<>();
List<String> fieldNames = new ArrayList<>();
for (int i = 0; i < pk.numFields(); i++) {
String fp = pk.getFieldPath(i);
String fn = HollowCodeGenerationUtils.normalizeFieldPathToParamName(fp);
fieldNames.add(fn);
FieldType ft = pk.getFieldType(dataset, i);
if (FieldType.REFERENCE.equals(ft)) {
HollowObjectSchema refSchema = pk.getFieldSchema(dataset, i);
params.add(refSchema.getName() + " " + fn);
} else {
params.add(HollowCodeGenerationUtils.getJavaScalarType(ft) + " " + fn);
}
}
StringBuilder paramsAsStr = new StringBuilder();
StringBuilder fieldNamesAsStr = new StringBuilder();
for (int i = 0; i < params.size(); i++) {
if (i > 0) {
paramsAsStr.append(", ");
fieldNamesAsStr.append(", ");
}
paramsAsStr.append(params.get(i));
fieldNamesAsStr.append(fieldNames.get(i));
}
builder.append(" public " + hollowImplClassname(schema.getName()) + " findMatch(" + paramsAsStr + ") {\n");
builder.append(" int ordinal = idx.getMatchingOrdinal(" + fieldNamesAsStr + ");\n");
builder.append(" if(ordinal == -1)\n");
builder.append(" return null;\n");
builder.append(" return api.get" + hollowImplClassname(schema.getName()) + "(ordinal);\n");
builder.append(" }\n\n");
}
@Override
protected void genDeprecatedJavaDoc(StringBuilder builder) {
String typeName = hollowImplClassname(type);
builder.append(" * @deprecated see {@link com.netflix.hollow.api.consumer.index.UniqueKeyIndex} which can be created as follows:\n");
builder.append(" * <pre>{@code\n");
if (pk.numFields() > 1) {
builder.append(String.format(" * UniqueKeyIndex<%s, %1$s.Key> uki = %1$s.uniqueIndex(consumer);\n", typeName));
builder.append(String.format(" * %s.Key k = new %1$s.Key(...);\n", typeName));
builder.append(String.format(" * %s m = uki.findMatch(k);\n", typeName));
} else {
FieldType ft = pk.getFieldType(dataset, 0);
String keyName;
if (FieldType.REFERENCE.equals(ft)) {
HollowObjectSchema refSchema = pk.getFieldSchema(dataset, 0);
keyName = hollowImplClassname(refSchema.getName());
} else {
keyName = HollowCodeGenerationUtils.getJavaScalarType(ft);
}
builder.append(String.format(" * UniqueKeyIndex<%1$s, %2$s> uki = %1$s.uniqueIndex(consumer);\n", typeName, keyName));
builder.append(String.format(" * %s k = ...;\n", keyName));
builder.append(String.format(" * %s m = uki.findMatch(k);\n", typeName));
}
builder.append(" * }</pre>\n");
builder.append(String.format(" * @see %s#uniqueIndex\n", typeName));
}
} | 9,335 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/codegen/indexes/HollowUniqueKeyIndexGenerator.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.codegen.indexes;
import com.netflix.hollow.api.codegen.CodeGeneratorConfig;
import com.netflix.hollow.api.codegen.HollowAPIGenerator;
import com.netflix.hollow.api.consumer.HollowConsumer;
import com.netflix.hollow.api.consumer.index.AbstractHollowUniqueKeyIndex;
import com.netflix.hollow.api.consumer.index.HollowUniqueKeyIndex;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowSchema;
import java.util.Arrays;
/**
* This class contains template logic for generating a {@link HollowAPI} implementation. Not intended for external consumption.
*
* @see HollowAPIGenerator
*/
public class HollowUniqueKeyIndexGenerator extends HollowIndexGenerator {

    // Schema of the indexed type and its type name (used in generated identifiers).
    protected final HollowObjectSchema schema;
    protected final String type;

    // Knobs flipped by subclasses to vary the shape of the generated class:
    // whether the (HollowConsumer)-only convenience constructors are emitted,
    protected boolean isGenSimpleConstructor = false;
    // whether the (HollowConsumer, String...) constructors are public or private,
    protected boolean isParameterizedConstructorPublic = true;
    // the default passed for isListenToDataRefresh in delegating constructors,
    protected boolean isAutoListenToDataRefresh = false;
    // and whether the generated class declares "implements HollowUniqueKeyIndex<...>".
    protected boolean isImplementsUniqueKeyIndex = true;

    public HollowUniqueKeyIndexGenerator(String packageName, String apiClassname, HollowObjectSchema schema,
            HollowDataset dataset, CodeGeneratorConfig config) {
        super(packageName, apiClassname, dataset, config);
        this.type = schema.getName();
        this.className = getClassName(schema);
        this.schema = schema;
    }

    // Subclasses override to change the generated class-name suffix.
    protected String getClassName(HollowObjectSchema schema) {
        return schema.getName() + "UniqueKeyIndex";
    }

    /**
     * Renders the complete java source of the generated (deprecated) unique-key index class:
     * package/imports, deprecation javadoc, class declaration, constructors, and findMatch().
     */
    @Override
    public String generate() {
        StringBuilder builder = new StringBuilder();
        appendPackageAndCommonImports(builder, apiClassname, Arrays.<HollowSchema>asList(schema));
        builder.append("import " + HollowConsumer.class.getName() + ";\n");
        builder.append("import " + AbstractHollowUniqueKeyIndex.class.getName() + ";\n");
        builder.append("import " + HollowUniqueKeyIndex.class.getName() + ";\n");
        // HollowObjectSchema is only referenced from the simple constructor's body.
        if (isGenSimpleConstructor)
            builder.append("import " + HollowObjectSchema.class.getName() + ";\n");
        builder.append("\n/**\n");
        genDeprecatedJavaDoc(builder);
        builder.append(" */\n");
        builder.append("@Deprecated\n");
        builder.append("@SuppressWarnings(\"all\")\n");
        builder.append("public class " + className + " extends " + AbstractHollowUniqueKeyIndex.class.getSimpleName() + "<" + apiClassname + ", " + hollowImplClassname(type) + "> ");
        if (isImplementsUniqueKeyIndex) {
            builder.append("implements " + HollowUniqueKeyIndex.class.getSimpleName() + "<" + hollowImplClassname(type) + "> ");
        }
        builder.append("{\n\n");
        {
            genConstructors(builder);
            genPublicAPIs(builder);
        }
        builder.append("}");
        return builder.toString();
    }

    // Emits the simple constructor(s) only when the subclass asked for them.
    protected void genConstructors(StringBuilder builder) {
        if (isGenSimpleConstructor)
            genSimpleConstructor(builder);
        genParameterizedConstructor(builder);
    }

    /**
     * Emits constructors that derive the field paths from the type's primary key
     * as declared in the consumer's state engine.
     */
    protected void genSimpleConstructor(StringBuilder builder) {
        builder.append(" public " + className + "(HollowConsumer consumer) {\n");
        builder.append(" this(consumer, "+ isAutoListenToDataRefresh + ");\n");
        builder.append(" }\n\n");
        builder.append(" public " + className + "(HollowConsumer consumer, boolean isListenToDataRefresh) {\n");
        builder.append(" this(consumer, isListenToDataRefresh, ((HollowObjectSchema)consumer.getStateEngine().getNonNullSchema(\"" + type + "\")).getPrimaryKey().getFieldPaths());\n");
        builder.append(" }\n\n");
    }

    /**
     * Emits the constructors that accept explicit field paths; visibility is controlled
     * by {@code isParameterizedConstructorPublic}.
     */
    protected void genParameterizedConstructor(StringBuilder builder) {
        builder.append(" " + (isParameterizedConstructorPublic ? "public " : "private ") + className + "(HollowConsumer consumer, String... fieldPaths) {\n");
        builder.append(" this(consumer, "+ isAutoListenToDataRefresh + ", fieldPaths);\n");
        builder.append(" }\n\n");
        builder.append(" " + (isParameterizedConstructorPublic ? "public " : "private ") + className + "(HollowConsumer consumer, boolean isListenToDataRefresh, String... fieldPaths) {\n");
        builder.append(" super(consumer, \"" + type + "\", isListenToDataRefresh, fieldPaths);\n");
        builder.append(" }\n\n");
    }

    // Extension point: subclasses add or replace the public API methods emitted here.
    protected void genPublicAPIs(StringBuilder builder) {
        genFindMatchAPI(builder);
    }

    /**
     * Emits the default Object... findMatch() method; annotated @Override only when the
     * generated class implements HollowUniqueKeyIndex.
     */
    protected void genFindMatchAPI(StringBuilder builder) {
        if (isImplementsUniqueKeyIndex)
            builder.append(" @Override\n");
        builder.append(" public " + hollowImplClassname(type) + " findMatch(Object... keys) {\n");
        builder.append(" int ordinal = idx.getMatchingOrdinal(keys);\n");
        builder.append(" if(ordinal == -1)\n");
        builder.append(" return null;\n");
        builder.append(" return api.get" + hollowImplClassname(type) + "(ordinal);\n");
        builder.append(" }\n\n");
    }

    /**
     * Appends the {@code @deprecated} javadoc body pointing at the replacement
     * UniqueKeyIndex API, with a usage example for the indexed type.
     */
    protected void genDeprecatedJavaDoc(StringBuilder builder) {
        String typeName = hollowImplClassname(type);
        builder.append(" * @deprecated see {@link com.netflix.hollow.api.consumer.index.UniqueKeyIndex} which can be built as follows:\n");
        builder.append(" * <pre>{@code\n");
        builder.append(String.format(" * UniqueKeyIndex<%s, K> uki = UniqueKeyIndex.from(consumer, %1$s.class)\n", typeName));
        builder.append(" * .usingBean(k);\n");
        builder.append(String.format(" * %s m = uki.findMatch(k);\n", typeName));
        builder.append(" * }</pre>\n");
        builder.append(" * where {@code K} is a class declaring key field paths members, annotated with\n");
        builder.append(" * {@link com.netflix.hollow.api.consumer.index.FieldPath}, and {@code k} is an instance of\n");
        builder.append(String.format(" * {@code K} that is the key to find the unique {@code %s} object.\n", typeName));
    }
}
| 9,336 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/common/Listeners.java | package com.netflix.hollow.api.common;
import com.netflix.hollow.api.producer.listener.VetoableListener;
import java.util.Arrays;
import java.util.function.Consumer;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Stream;
/**
 * Base class for an immutable set of event listeners, with helpers to select
 * listeners by type and to invoke an action on each one with per-listener
 * error isolation.
 */
public abstract class Listeners {
    private static final Logger LOG = Logger.getLogger(Listeners.class.getName());

    protected final EventListener[] listeners;

    protected Listeners(EventListener[] listeners) {
        this.listeners = listeners;
    }

    /** Returns the listeners that are instances of {@code c}, cast to that type. */
    public <T extends EventListener> Stream<T> getListeners(Class<T> c) {
        return Arrays.stream(listeners).filter(c::isInstance).map(c::cast);
    }

    /** Invokes {@code r} on every listener of type {@code c}. */
    protected <T extends EventListener> void fire(
            Class<T> c, Consumer<? super T> r) {
        fireStream(getListeners(c), r);
    }

    /** Invokes {@code r} on each listener in the stream, isolating failures per listener. */
    protected <T extends EventListener> void fireStream(
            Stream<T> s, Consumer<? super T> r) {
        s.forEach(listener -> dispatch(listener, r));
    }

    // Runs the action on one listener. Veto exceptions always propagate; other runtime
    // exceptions propagate only for VetoableListeners, otherwise they are logged and swallowed.
    private static <T extends EventListener> void dispatch(T listener, Consumer<? super T> action) {
        try {
            action.accept(listener);
        } catch (VetoableListener.ListenerVetoException e) {
            throw e;
        } catch (RuntimeException e) {
            if (listener instanceof VetoableListener) {
                throw e;
            }
            LOG.log(Level.WARNING, "Error executing listener", e);
        }
    }
}
| 9,337 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/common/ListenerSupport.java | package com.netflix.hollow.api.common;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
/**
 * A mutable registry of event listeners. Backed by a copy-on-write list so
 * listeners may be added or removed concurrently with iteration elsewhere.
 */
public class ListenerSupport {

    protected final CopyOnWriteArrayList<EventListener> eventListeners;

    /** Creates an empty registry. */
    public ListenerSupport() {
        eventListeners = new CopyOnWriteArrayList<>();
    }

    /** Creates a registry pre-populated with the given listeners. */
    public ListenerSupport(List<? extends EventListener> listeners) {
        eventListeners = new CopyOnWriteArrayList<>(listeners);
    }

    /** Copy constructor: snapshots the other registry's current listeners. */
    public ListenerSupport(ListenerSupport that) {
        this(that.eventListeners);
    }

    /** Registers the listener unless it is already present. */
    public void addListener(EventListener listener) {
        eventListeners.addIfAbsent(listener);
    }

    /** Removes the first occurrence of the listener, if present. */
    public void removeListener(EventListener listener) {
        eventListeners.remove(listener);
    }
}
| 9,338 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/common/EventListener.java | package com.netflix.hollow.api.common;
/**
 * The top-level type for all listeners.
 * <p>
 * Marker interface: declares no methods of its own; concrete listener
 * contracts extend it so they can be filtered by type (see {@code Listeners#getListeners}).
 */
public interface EventListener {
}
| 9,339 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/consumer/HollowConsumerAPI.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.hollow.api.consumer;
import com.netflix.hollow.core.type.HBoolean;
import com.netflix.hollow.core.type.HDouble;
import com.netflix.hollow.core.type.HFloat;
import com.netflix.hollow.core.type.HInteger;
import com.netflix.hollow.core.type.HLong;
import com.netflix.hollow.core.type.HString;
import java.util.Collection;
/**
 * Mix-in retriever contracts for generated consumer APIs over the built-in
 * hollow scalar types (boolean, double, float, integer, long, string).
 * <p>
 * Redundant {@code public} modifiers removed: interface members are implicitly public.
 */
public interface HollowConsumerAPI {

    /** Access to {@link HBoolean} records in the current data state. */
    interface BooleanRetriever {
        /** @return all {@code HBoolean} records */
        Collection<HBoolean> getAllHBoolean();

        /** @return the {@code HBoolean} record at the given ordinal */
        HBoolean getHBoolean(int ordinal);
    }

    /** Access to {@link HDouble} records in the current data state. */
    interface DoubleRetriever {
        /** @return all {@code HDouble} records */
        Collection<HDouble> getAllHDouble();

        /** @return the {@code HDouble} record at the given ordinal */
        HDouble getHDouble(int ordinal);
    }

    /** Access to {@link HFloat} records in the current data state. */
    interface FloatRetriever {
        /** @return all {@code HFloat} records */
        Collection<HFloat> getAllHFloat();

        /** @return the {@code HFloat} record at the given ordinal */
        HFloat getHFloat(int ordinal);
    }

    /** Access to {@link HInteger} records in the current data state. */
    interface IntegerRetriever {
        /** @return all {@code HInteger} records */
        Collection<HInteger> getAllHInteger();

        /** @return the {@code HInteger} record at the given ordinal */
        HInteger getHInteger(int ordinal);
    }

    /** Access to {@link HLong} records in the current data state. */
    interface LongRetriever {
        /** @return all {@code HLong} records */
        Collection<HLong> getAllHLong();

        /** @return the {@code HLong} record at the given ordinal */
        HLong getHLong(int ordinal);
    }

    /** Access to {@link HString} records in the current data state. */
    interface StringRetriever {
        /** @return all {@code HString} records */
        Collection<HString> getAllHString();

        /** @return the {@code HString} record at the given ordinal */
        HString getHString(int ordinal);
    }
}
| 9,340 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/consumer/HollowConsumer.java | /*
* Copyright 2016-2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.consumer;
import static com.netflix.hollow.core.util.Threads.daemonThread;
import static java.util.concurrent.Executors.newSingleThreadExecutor;
import com.netflix.hollow.PublicApi;
import com.netflix.hollow.PublicSpi;
import com.netflix.hollow.api.client.FailedTransitionTracker;
import com.netflix.hollow.api.client.HollowAPIFactory;
import com.netflix.hollow.api.client.HollowClientUpdater;
import com.netflix.hollow.api.client.StaleHollowReferenceDetector;
import com.netflix.hollow.api.codegen.HollowAPIClassJavaGenerator;
import com.netflix.hollow.api.consumer.fs.HollowFilesystemBlobRetriever;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.api.metrics.HollowConsumerMetrics;
import com.netflix.hollow.api.metrics.HollowMetricsCollector;
import com.netflix.hollow.core.HollowConstants;
import com.netflix.hollow.core.memory.MemoryMode;
import com.netflix.hollow.core.read.OptionalBlobPartInput;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.filter.HollowFilterConfig;
import com.netflix.hollow.core.read.filter.TypeFilter;
import com.netflix.hollow.core.util.DefaultHashCodeFinder;
import com.netflix.hollow.core.util.HollowObjectHashCodeFinder;
import com.netflix.hollow.tools.history.HollowHistory;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.UnaryOperator;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* A HollowConsumer is the top-level class used by consumers of Hollow data to initialize and keep up-to-date a local in-memory
* copy of a hollow dataset. The interactions between the "blob" transition store and announcement listener are defined by
* this class, and the implementations of the data retrieval, announcement mechanism are abstracted in the interfaces which
* are provided to this class.
* <p>
* To obtain a HollowConsumer, you should use a builder pattern, for example:
* <pre>{@code
* HollowConsumer consumer = newHollowConsumer().withBlobRetriever(retriever)
* .withAnnouncementWatcher(watcher)
* .withGeneratedAPIClass(MovieAPI.class)
* .build();
* }</pre>
* <p>
* The following components are injectable, but only an implementation of the HollowConsumer.BlobRetriever is
* required to be injected, all other components are optional. :
* <dl>
* <dt>{@link HollowConsumer.BlobRetriever}</dt>
* <dd>Implementations of this class define how to retrieve blob data from the blob store.</dd>
*
* <dt>{@link HollowConsumer.AnnouncementWatcher}</dt>
* <dd>Implementations of this class define the announcement mechanism, which is used to track the version of the
* currently announced state. It's also expected that implementations will trigger a refresh each time current
* data version is updated.</dd>
*
* <dt>a List of {@link HollowConsumer.RefreshListener}s</dt>
* <dd>RefreshListener implementations will define what to do when various events happen before, during, and after updating
* local in-memory copies of hollow data sets.</dd>
*
* <dt>the Class representing a generated Hollow API</dt>
* <dd>Defines how to create a {@link HollowAPI} for the dataset, useful when wrapping a dataset with an api which has
* been generated (via the {@link HollowAPIClassJavaGenerator})</dd>
*
* <dt>{@link HollowFilterConfig}</dt>
* <dd>Defines what types and fields to load (or not load) into memory from hollow datasets. Generally useful to reduce
* heap footprint on consumers which do not require visibility of an entire dataset.</dd>
*
* <dt>{@link HollowConsumer.DoubleSnapshotConfig}</dt>
* <dd>Defines whether this consumer may attempt a double snapshot, and how many deltas will be attempted during a single refresh.
* A double snapshot will allow your consumer to update in case of a broken delta chain, but will also result in a doubling of
* the heap footprint while the double snapshot is occurring.</dd>
*
* <dt>{@link HollowConsumer.ObjectLongevityConfig}</dt>
* <dd>Object longevity is used to guarantee that Hollow objects which are backed by removed records will remain usable and
* consistent until old references are discarded. This behavior is turned off by default. Implementations of this config
* can be used to enable and configure this behavior.</dd>
*
* <dt>{@link HollowConsumer.ObjectLongevityDetector}</dt>
* <dd>Implementations of this config will be notified when usage of expired Hollow object references is attempted.</dd>
*
* <dt>An Executor</dt>
* <dd>The Executor which will be used to perform updates when {@link #triggerAsyncRefresh()} is called. This will
* default to a new fixed thread pool with a single refresh thread.</dd>
*
* </dl>
*/
@SuppressWarnings({"unused", "WeakerAccess"})
@PublicApi
public class HollowConsumer {
private static final Logger LOG = Logger.getLogger(HollowConsumer.class.getName());

// Watches for newly announced versions; may be null (manual/explicit refresh mode).
protected final AnnouncementWatcher announcementWatcher;
// Performs the actual snapshot/delta transitions and owns the read state and API.
protected final HollowClientUpdater updater;
// Guards refreshes; triggerRefresh() runs under the write lock.
protected final ReadWriteLock refreshLock;
protected final HollowConsumerMetrics metrics;
// Executor on which triggerAsyncRefresh()/triggerAsyncRefreshWithDelay() schedule work.
private final Executor refreshExecutor;
private final MemoryMode memoryMode;
/**
* @deprecated use {@link HollowConsumer.Builder}
*/
@Deprecated
protected HollowConsumer(BlobRetriever blobRetriever,
                         AnnouncementWatcher announcementWatcher,
                         List<RefreshListener> refreshListeners,
                         HollowAPIFactory apiFactory,
                         HollowFilterConfig dataFilter,
                         ObjectLongevityConfig objectLongevityConfig,
                         ObjectLongevityDetector objectLongevityDetector,
                         DoubleSnapshotConfig doubleSnapshotConfig,
                         HollowObjectHashCodeFinder hashCodeFinder,
                         Executor refreshExecutor,
                         MemoryMode memoryMode) {
    // Delegates to the full constructor with no metrics collector.
    this(blobRetriever, announcementWatcher, refreshListeners, apiFactory, dataFilter,
            objectLongevityConfig, objectLongevityDetector, doubleSnapshotConfig,
            hashCodeFinder, refreshExecutor, memoryMode, null);
}
/**
* @deprecated use {@link HollowConsumer.Builder}
*/
@Deprecated
protected HollowConsumer(BlobRetriever blobRetriever,
                         AnnouncementWatcher announcementWatcher,
                         List<RefreshListener> refreshListeners,
                         HollowAPIFactory apiFactory,
                         HollowFilterConfig dataFilter,
                         ObjectLongevityConfig objectLongevityConfig,
                         ObjectLongevityDetector objectLongevityDetector,
                         DoubleSnapshotConfig doubleSnapshotConfig,
                         HollowObjectHashCodeFinder hashCodeFinder,
                         Executor refreshExecutor,
                         MemoryMode memoryMode,
                         HollowMetricsCollector<HollowConsumerMetrics> metricsCollector) {
    this.metrics = new HollowConsumerMetrics();
    this.updater = new HollowClientUpdater(blobRetriever,
            refreshListeners,
            apiFactory,
            doubleSnapshotConfig,
            hashCodeFinder,
            memoryMode,
            objectLongevityConfig,
            objectLongevityDetector,
            metrics,
            metricsCollector);
    // Install the type/field filter before any refresh can run.
    updater.setFilter(dataFilter);
    this.announcementWatcher = announcementWatcher;
    this.refreshExecutor = refreshExecutor;
    this.refreshLock = new ReentrantReadWriteLock();
    // Subscribing may begin delivering update notifications immediately.
    // NOTE(review): memoryMode is assigned AFTER subscribing here, whereas the
    // builder-based constructor assigns it before — confirm the ordering is intentional.
    if (announcementWatcher != null)
        announcementWatcher.subscribeToUpdates(this);
    this.memoryMode = memoryMode;
}
protected <B extends Builder<B>> HollowConsumer(B builder) {
    // duplicated with HollowConsumer(...) constructor above. We cannot chain constructor calls because that
    // constructor subscribes to the announcement watcher and we have more setup to do first
    this.metrics = new HollowConsumerMetrics();
    this.updater = new HollowClientUpdater(builder.blobRetriever,
            builder.refreshListeners,
            builder.apiFactory,
            builder.doubleSnapshotConfig,
            builder.hashCodeFinder,
            builder.memoryMode,
            builder.objectLongevityConfig,
            builder.objectLongevityDetector,
            metrics,
            builder.metricsCollector);
    // Install the type filter (and optional shard-update optimization) before any refresh runs.
    updater.setFilter(builder.typeFilter);
    if(builder.skipTypeShardUpdateWithNoAdditions)
        updater.setSkipShardUpdateWithNoAdditions(true);
    this.announcementWatcher = builder.announcementWatcher;
    this.refreshExecutor = builder.refreshExecutor;
    this.refreshLock = new ReentrantReadWriteLock();
    this.memoryMode = builder.memoryMode;
    // Subscribe last: update callbacks must not observe a partially constructed consumer.
    if (announcementWatcher != null)
        announcementWatcher.subscribeToUpdates(this);
}
/**
* Triggers a refresh to the latest version specified by the {@link HollowConsumer.AnnouncementWatcher}.
* If already on the latest version, this operation is a no-op.
* <p>
* If a {@link HollowConsumer.AnnouncementWatcher} is not present, this call trigger a refresh to the
* latest version available in the blob store.
* <p>
* This is a blocking call.
*/
public void triggerRefresh() {
    Lock writeLock = refreshLock.writeLock();
    writeLock.lock();
    try {
        // With no announcement watcher, Long.MAX_VALUE asks the updater for the
        // latest version available in the blob store.
        VersionInfo target = (announcementWatcher == null)
                ? new VersionInfo(Long.MAX_VALUE)
                : announcementWatcher.getLatestVersionInfo();
        updater.updateTo(target);
    } catch (Error | RuntimeException e) {
        throw e;
    } catch (Throwable t) {
        // Wrap checked throwables from the updater so callers see an unchecked exception.
        throw new RuntimeException(t);
    } finally {
        writeLock.unlock();
    }
}
/**
* Immediately triggers a refresh in a different thread to the latest version
* specified by the {@link HollowConsumer.AnnouncementWatcher}. If already on
* the latest version, this operation is a no-op.
* <p>
* If a {@link HollowConsumer.AnnouncementWatcher} is not present, this call trigger a refresh to the
* latest version available in the blob store.
* <p>
* This is an asynchronous call.
*/
public void triggerAsyncRefresh() {
    // A zero-millisecond delay schedules the refresh immediately on the refresh executor.
    triggerAsyncRefreshWithDelay(0);
}
/**
* Triggers async refresh after the specified number of milliseconds has passed.
* <p>
* Any subsequent calls for async refresh will not begin until after the specified delay
* has completed.
*
* @param delayMillis the delay, in millseconds, before triggering the refresh
*/
public void triggerAsyncRefreshWithDelay(int delayMillis) {
    // Capture the absolute ready time now; the sleep below accounts for any queueing delay.
    final long readyTimeMillis = System.currentTimeMillis() + delayMillis;
    refreshExecutor.execute(() -> {
        long remaining = readyTimeMillis - System.currentTimeMillis();
        if (remaining > 0) {
            try {
                Thread.sleep(remaining);
            } catch (InterruptedException e) {
                // Interruption (e.g. executor shutdown) cancels the pending refresh.
                LOG.log(Level.INFO, "Async refresh interrupted before trigger, refresh cancelled", e);
                return;
            }
        }
        try {
            triggerRefresh();
        } catch (Error | RuntimeException e) {
            // Log before rethrowing so failures surface even if the executor swallows them.
            LOG.log(Level.SEVERE, "Async refresh failed", e);
            throw e;
        }
    });
}
/**
* If a {@link HollowConsumer.AnnouncementWatcher} is not specified, then this method will attempt to update
* to the specified version, and if the specified version does not exist then to a different version as specified
* by functionality in the {@code BlobRetriever}.
* <p>
* Otherwise, an UnsupportedOperationException will be thrown.
* <p>
* This is a blocking call.
*
* @param version the version to refresh to
*/
public void triggerRefreshTo(long version) {
    // Wrap the bare version number; no metadata or pinning status is supplied.
    triggerRefreshTo(new VersionInfo(version));
}
/**
* Similar to {@link #triggerRefreshTo(long)} but instead of accepting a long version no. it accepts a
* {@link VersionInfo} instance that contains (in addition to version no.) version specific metadata and
* pinning status.
*
* @param versionInfo version no., metadata, and pined status for the desired version
*/
public void triggerRefreshTo(VersionInfo versionInfo) {
    // Explicit version targeting conflicts with watcher-driven refreshes, so it is disallowed.
    if (announcementWatcher != null)
        throw new UnsupportedOperationException("Cannot trigger refresh to specified version when a HollowConsumer.AnnouncementWatcher is present");
    try {
        updater.updateTo(versionInfo);
    } catch (Error | RuntimeException e) {
        throw e;
    } catch (Throwable t) {
        // Wrap checked throwables from the updater so callers see an unchecked exception.
        throw new RuntimeException(t);
    }
}
/**
* @return the {@link HollowReadStateEngine} which is holding the underlying hollow dataset.
*/
public HollowReadStateEngine getStateEngine() {
    // The updater owns the current read state; this is a pass-through accessor.
    return updater.getStateEngine();
}
/**
 * @return the current version of the dataset. This is the unique identifier of the data's state.
 */
public long getCurrentVersionId() {
// Delegates to the updater, which tracks the applied version.
return updater.getCurrentVersionId();
}
/**
 * Returns a {@code CompletableFuture} that completes after the initial data load succeeds. Also triggers the initial
 * load asynchronously, to avoid waiting on a polling interval for the initial load.
 * <p>
 * Callers can use methods like {@link CompletableFuture#join()} or {@link CompletableFuture#get(long, TimeUnit)}
 * to block until the initial load is complete.
 * <p>
 * A failure during the initial load <em>will not</em> cause the future to complete exceptionally; this allows
 * for a subsequent data version to eventually succeed.
 * <p>
 * In a consumer without published or announced versions – or one that always fails the initial load – the future
 * will remain incomplete indefinitely.
 *
 * @return a future which, when completed, has a value set to the data version that was initially loaded
 */
public CompletableFuture<Long> getInitialLoad() {
try {
// Best effort: a failed trigger is logged, not rethrown, because a later
// scheduled/announced refresh can still complete the returned future.
// NOTE(review): the NullPointerException catch appears defensive — presumably a
// collaborator may be unset during construction; confirm which one can be null.
triggerAsyncRefresh();
} catch (RejectedExecutionException | NullPointerException e) {
LOG.log(Level.INFO, "Refresh triggered by getInitialLoad() failed; future attempts might succeed", e);
}
return updater.getInitialLoad();
}
/**
 * @return the api which wraps the underlying dataset.
 */
public HollowAPI getAPI() {
return updater.getAPI();
}
/**
 * Equivalent to calling {@link #getAPI()} and casting to the specified API.
 *
 * @param apiClass the class of the API
 * @param <T> the type of the API
 * @return the API which wraps the underlying dataset
 * @throws ClassCastException if the underlying API is not an instance of {@code apiClass}
 */
public <T extends HollowAPI> T getAPI(Class<T> apiClass) {
return apiClass.cast(updater.getAPI());
}
/**
 * Will force a double snapshot refresh on the next update.
 */
public void forceDoubleSnapshotNextUpdate() {
updater.forceDoubleSnapshotNextUpdate();
}
/**
 * Clear any failed transitions from the {@link FailedTransitionTracker}, so that they may be reattempted when an update is triggered.
 */
public void clearFailedTransitions() {
updater.clearFailedTransitions();
}
/**
 * @return the number of failed snapshot transitions stored in the {@link FailedTransitionTracker}.
 */
public int getNumFailedSnapshotTransitions() {
return updater.getNumFailedSnapshotTransitions();
}
/**
 * @return the number of failed delta transitions stored in the {@link FailedTransitionTracker}.
 */
public int getNumFailedDeltaTransitions() {
return updater.getNumFailedDeltaTransitions();
}
/**
 * @return a {@link ReadWriteLock#readLock()}, the corresponding writeLock() of which is used to synchronize refreshes.
 * <p>
 * This is useful if performing long-running operations which require a consistent view of the entire dataset in a
 * single data state, to guarantee that updates do not happen while the operation runs.
 */
public Lock getRefreshLock() {
// Refreshes acquire the write lock; holding this read lock therefore blocks refreshes.
return refreshLock.readLock();
}
/**
 * Adds a {@link RefreshListener} to this consumer.
 * <p>
 * If the listener was previously added to this consumer, as determined by reference equality or {@code Object}
 * equality, then this method does nothing.
 * <p>
 * If a listener is added, concurrently, during the occurrence of a refresh then the listener will not receive
 * events until the next refresh. The listener may also be removed concurrently.
 * <p>
 * If the listener instance implements {@link RefreshRegistrationListener} then before the listener is added
 * the {@link RefreshRegistrationListener#onBeforeAddition} method is invoked. If that method throws an
 * exception then that exception will be thrown by this method and the listener will not be added.
 *
 * @param listener the refresh listener to add
 */
public void addRefreshListener(RefreshListener listener) {
updater.addRefreshListener(listener, this);
}
/**
 * Removes a {@link RefreshListener} from this consumer.
 * <p>
 * If the listener was not previously added to this consumer, as determined by reference equality or {@code Object}
 * equality, then this method does nothing.
 * <p>
 * If a listener is removed, concurrently, during the occurrence of a refresh then the listener will receive all
 * events for that refresh but will not receive events for any subsequent refreshes.
 * <p>
 * If the listener instance implements {@link RefreshRegistrationListener} then after the listener is removed
 * the {@link RefreshRegistrationListener#onAfterRemoval} method is invoked. If that method throws an
 * exception then that exception will be thrown by this method.
 *
 * @param listener the refresh listener to remove
 */
public void removeRefreshListener(RefreshListener listener) {
updater.removeRefreshListener(listener, this);
}
/**
 * @return the metrics for this consumer
 */
public HollowConsumerMetrics getMetrics() {
return metrics;
}
/**
 * An interface which defines the necessary interactions of Hollow with a blob data store.
 * <p>
 * Implementations will define how to retrieve blob data from a data store.
 */
public interface BlobRetriever {
/**
 * Returns the snapshot for the state with the greatest version identifier which is equal to or less than the desired version
 * @param desiredVersion the desired version
 * @return the blob of the snapshot
 */
HollowConsumer.Blob retrieveSnapshotBlob(long desiredVersion);
/**
 * Returns a delta transition which can be applied to the specified version identifier
 * @param currentVersion the current version
 * @return the blob of the delta
 */
HollowConsumer.Blob retrieveDeltaBlob(long currentVersion);
/**
 * Returns a reverse delta transition which can be applied to the specified version identifier
 * @param currentVersion the current version
 * @return the blob of the reverse delta
 */
HollowConsumer.Blob retrieveReverseDeltaBlob(long currentVersion);
/**
 * @return the set of optional blob part names this retriever is configured for, or
 *         {@code null} (the default) when no optional parts are configured
 */
default Set<String> configuredOptionalBlobParts() {
return null;
}
/**
 * Returns the header blob for the specified version.
 *
 * @param currentVersion the current version
 * @return the header blob
 * @throws UnsupportedOperationException by default; implementations that support
 *         header-only retrieval must override
 */
default HollowConsumer.HeaderBlob retrieveHeaderBlob(long currentVersion) {
throw new UnsupportedOperationException();
}
}
/**
 * Minimal contract shared by {@link Blob} and {@link HeaderBlob}: access to the
 * underlying blob data as a stream and, optionally, as a file.
 */
protected interface VersionedBlob {
/**
 * @return an input stream over the blob's data
 * @throws IOException if the stream cannot be opened
 */
InputStream getInputStream() throws IOException;
/**
 * @return the blob's data as a file, for implementations that support it
 * @throws UnsupportedOperationException by default
 */
default File getFile() throws IOException {
throw new UnsupportedOperationException();
}
}
/**
 * A {@link VersionedBlob} tied to a single data state version; data access is
 * inherited from {@link VersionedBlob}.
 */
public static abstract class HeaderBlob implements VersionedBlob{
// the data state version this blob belongs to; immutable
private final long version;
protected HeaderBlob(long version) {
this.version = version;
}
/**
 * @return the data state version this blob belongs to
 */
public long getVersion() {
return this.version;
}
}
/**
 * A Blob, which is either a snapshot or a delta, defines three things:
 * <dl>
 * <dt>The "from" version</dt>
 * <dd>The unique identifier of the state to which a delta transition should be applied. If
 * this is a snapshot, then this value is HollowConstants.VERSION_NONE.</dd>
 *
 * <dt>The "to" version</dt>
 * <dd>The unique identifier of the state at which a dataset will arrive after this blob is applied.</dd>
 *
 * <dt>The actual blob data</dt>
 * <dd>Implementations will define how to retrieve the actual blob data for this specific blob from a data store as an InputStream.</dd>
 * </dl>
 */
public static abstract class Blob implements VersionedBlob {
    protected final long fromVersion;
    protected final long toVersion;
    // Classification (SNAPSHOT / DELTA / REVERSE_DELTA) derived once from the versions.
    private final BlobType blobType;

    /**
     * Instantiate a snapshot to a specified data state version.
     *
     * @param toVersion the version
     */
    public Blob(long toVersion) {
        this(HollowConstants.VERSION_NONE, toVersion);
    }

    /**
     * Instantiate a delta from one data state version to another.
     *
     * @param fromVersion the version to start the delta from
     * @param toVersion the version to end the delta at
     */
    public Blob(long fromVersion, long toVersion) {
        this.fromVersion = fromVersion;
        this.toVersion = toVersion;
        // Classify directly from the (final) version fields rather than through the
        // public isSnapshot()/isReverseDelta() methods: invoking overridable methods
        // from a constructor would allow a subclass override to run before the
        // subclass is initialized. The rules below are identical to those methods.
        if (fromVersion == HollowConstants.VERSION_NONE)
            this.blobType = BlobType.SNAPSHOT;
        else if (toVersion < fromVersion)
            this.blobType = BlobType.REVERSE_DELTA;
        else
            this.blobType = BlobType.DELTA;
    }

    /**
     * Implementations will define how to retrieve the actual blob data for this specific transition from a data store.
     * <p>
     * It is expected that the returned InputStream will not be interrupted. For this reason, it is a good idea to
     * retrieve the entire blob (e.g. to disk) from a remote datastore prior to returning this stream.
     *
     * @return the input stream to the blob
     * @throws IOException if the input stream to the blob cannot be obtained
     */
    public abstract InputStream getInputStream() throws IOException;

    /**
     * Implementations may define how to retrieve the optional blob part data for this specific transition from a data store.
     * <p>
     * It is expected that none of the returned InputStreams will be interrupted. For this reason, it is a good idea to
     * retrieve the entire blob part data (e.g. to disk) from a remote datastore prior to returning these streams.
     *
     * @return OptionalBlobPartInput, or {@code null} (the default) when no optional parts exist
     * @throws IOException exception in reading from blob or file
     */
    public OptionalBlobPartInput getOptionalBlobPartInputs() throws IOException {
        return null;
    }

    /**
     * Blobs can be of types {@code SNAPSHOT}, {@code DELTA} or {@code REVERSE_DELTA}.
     */
    public enum BlobType {
        SNAPSHOT("snapshot"),
        DELTA("delta"),
        REVERSE_DELTA("reversedelta");

        // lowercase token used to identify this blob type, e.g. in blob naming
        private final String type;

        BlobType(String type) {
            this.type = type;
        }

        public String getType() {
            return this.type;
        }
    }

    /**
     * @return true if this blob is a snapshot, i.e. it has no "from" version
     */
    public boolean isSnapshot() {
        return fromVersion == HollowConstants.VERSION_NONE;
    }

    /**
     * @return true if this blob transitions backwards, to a lower version
     */
    public boolean isReverseDelta() {
        return toVersion < fromVersion;
    }

    /**
     * @return true if this blob is a forward delta (neither snapshot nor reverse delta)
     */
    public boolean isDelta() {
        return !isSnapshot() && !isReverseDelta();
    }

    public long getFromVersion() {
        return fromVersion;
    }

    public long getToVersion() {
        return toVersion;
    }

    public BlobType getBlobType() {
        return blobType;
    }
}
/**
 * This class holds an announced version, its pinned status and the announcement metadata.
 * isPinned and announcementMetadata fields are empty unless they are populated by the AnnouncementWatcher.
 */
public static class VersionInfo {
// NOTE(review): fields are package-private and mutable; kept as-is since package
// siblings may access them directly.
long version;
Optional<Boolean> isPinned;
Optional<Map<String, String>> announcementMetadata;
/**
 * Creates a VersionInfo with empty metadata and pinned status.
 *
 * @param version the announced version
 */
public VersionInfo(long version) {
this(version, Optional.empty(), Optional.empty());
}
/**
 * @param version the announced version
 * @param announcementMetadata metadata associated with the announcement, if known
 * @param isPinned whether the version is pinned, if known
 */
public VersionInfo(long version, Optional<Map<String, String>> announcementMetadata, Optional<Boolean> isPinned) {
this.version = version;
this.announcementMetadata = announcementMetadata;
this.isPinned = isPinned;
}
public long getVersion() {
return version;
}
public Optional<Map<String, String>> getAnnouncementMetadata() {
return announcementMetadata;
}
public Optional<Boolean> isPinned() {
return isPinned;
}
}
/**
 * Implementations of this class are responsible for two things:
 * <p>
 * 1) Tracking the latest announced data state version.
 * 2) Keeping the client up to date by calling triggerAsyncRefresh() on self when the latest version changes.
 * <p>
 * If an AnnouncementWatcher is provided to a HollowConsumer, then calling HollowConsumer#triggerRefreshTo() is unsupported.
 */
public interface AnnouncementWatcher {
// Sentinel returned by getLatestVersion() when nothing has been announced.
long NO_ANNOUNCEMENT_AVAILABLE = HollowConstants.VERSION_NONE;
/**
 * @return the latest announced version, or {@link #NO_ANNOUNCEMENT_AVAILABLE} if none exists.
 */
long getLatestVersion();
/**
 * Implementations of this method should subscribe a HollowConsumer to updates to announced versions.
 * <p>
 * When announcements are received via a push mechanism, or polling reveals a new version, a call should be placed to one
 * of the flavors of {@link HollowConsumer#triggerRefresh()} on the provided HollowConsumer.
 *
 * @param consumer the hollow consumer
 */
void subscribeToUpdates(HollowConsumer consumer);
/**
 * @return versionInfo - the latest announced version, its pinned status and announcement metadata.
 *         The default implementation supplies no metadata and no pinned status.
 */
default VersionInfo getLatestVersionInfo() {
return new VersionInfo(getLatestVersion(), Optional.empty(), Optional.empty());
}
}
/**
 * Configuration governing when a consumer is allowed or forced to perform a
 * double snapshot (a full snapshot load on an already-initialized consumer).
 */
public interface DoubleSnapshotConfig {
/**
 * @return whether double snapshots are permitted at all
 */
boolean allowDoubleSnapshot();
/**
 * @return the maximum number of deltas tolerated in a refresh plan before a double snapshot is preferred
 */
int maxDeltasBeforeDoubleSnapshot();
/**
 * @return whether a schema change should trigger a double snapshot; defaults to {@code false}
 */
default boolean doubleSnapshotOnSchemaChange() { return false; }
// Default policy: double snapshots allowed, at most 32 deltas per refresh plan.
DoubleSnapshotConfig DEFAULT_CONFIG = new DoubleSnapshotConfig() {
@Override
public int maxDeltasBeforeDoubleSnapshot() {
return 32;
}
@Override
public boolean allowDoubleSnapshot() {
return true;
}
};
}
/**
 * Configuration for long-lived ("object longevity") Hollow record support.
 */
public interface ObjectLongevityConfig {
/**
 * @return whether or not long-lived object support is enabled.
 * <p>
 * Because Hollow reuses pooled memory, if references to Hollow records are held too long, the underlying data may
 * be overwritten. When long-lived object support is enabled, Hollow records referenced via a {@link HollowAPI} will,
 * after an update, be backed by a reserved copy of the data at the time the reference was created. This guarantees
 * that even if a reference is held for a long time, it will continue to return the same data when interrogated.
 * <p>
 * These reserved copies are backed by the {@link HollowHistory} data structure.
 */
boolean enableLongLivedObjectSupport();
/**
 * @return whether stack traces are captured when expired (stale) object usage is detected
 */
boolean enableExpiredUsageStackTraces();
/**
 * @return if long-lived object support is enabled, the number of milliseconds before the {@link StaleHollowReferenceDetector}
 * will begin flagging usage of stale objects.
 */
long gracePeriodMillis();
/**
 * @return if long-lived object support is enabled, the number of milliseconds, after the grace period, during which
 * data is still available in stale references, but usage will be flagged by the {@link StaleHollowReferenceDetector}.
 * <p>
 * After the grace period + usage detection period have expired, the data from stale references will become inaccessible if
 * dropDataAutomatically() is enabled.
 */
long usageDetectionPeriodMillis();
/**
 * @return whether or not to drop data behind stale references after the grace period + usage detection period has elapsed, assuming
 * that no usage was detected during the usage detection period.
 */
boolean dropDataAutomatically();
/**
 * @return whether data is dropped even if flagged during the usage detection period.
 */
boolean forceDropData();
// Default: longevity disabled; grace and usage-detection periods each 1 hour.
ObjectLongevityConfig DEFAULT_CONFIG = new ObjectLongevityConfig() {
@Override
public boolean enableLongLivedObjectSupport() {
return false;
}
@Override
public boolean dropDataAutomatically() {
return false;
}
@Override
public boolean forceDropData() {
return false;
}
@Override
public boolean enableExpiredUsageStackTraces() {
return false;
}
@Override
public long usageDetectionPeriodMillis() {
return 60 * 60 * 1000; // 1 hour
}
@Override
public long gracePeriodMillis() {
return 60 * 60 * 1000; // 1 hour
}
};
}
/**
 * Listens for stale Hollow object usage
 */
public interface ObjectLongevityDetector {
/**
 * Stale reference detection hint. This will be called every ~30 seconds.
 * <p>
 * If a nonzero value is reported, then stale references to Hollow objects may be cached somewhere in your codebase.
 * <p>
 * This signal can be noisy, and a nonzero value indicates that some reference to stale data exists somewhere.
 *
 * @param count the count of stale references
 */
void staleReferenceExistenceDetected(int count);
/**
 * Stale reference USAGE detection. This will be called every ~30 seconds.
 * <p>
 * If a nonzero value is reported, then stale references to Hollow objects are being accessed from somewhere in your codebase.
 * <p>
 * This signal is noiseless, and a nonzero value indicates that some reference to stale data is USED somewhere.
 *
 * @param count the count of stale references
 */
void staleReferenceUsageDetected(int count);
// Default detector ignores both signals.
ObjectLongevityDetector DEFAULT_DETECTOR = new ObjectLongevityDetector() {
@Override
public void staleReferenceUsageDetected(int count) {
}
@Override
public void staleReferenceExistenceDetected(int count) {
}
};
}
/**
 * Implementations of this class will define what to do when various events happen before, during, and after updating
 * local in-memory copies of hollow data sets.
 */
public interface RefreshListener {
/**
 * Called when a new version has been detected in consumer refresh (and just before refreshStarted). Surfaces
 * metadata and pinning status pertaining to requested version if available from AnnouncementWatcher. Generally
 * useful for logging/metrics.
 *
 * @param requestedVersionInfo requested version's information comprising version, announcement metadata and its pinned status
 */
default void versionDetected(VersionInfo requestedVersionInfo) {};
/**
 * Indicates that a refresh has begun. Generally useful for logging.
 * <p>
 * A refresh is the process of a consumer getting from a current version to a desired version.
 * <p>
 * A refresh will consist of one of the following:
 * <ul>
 * <li>one or more deltas</li>
 * <li>a snapshot load, plus zero or more deltas</li>
 * </ul>
 *
 * @param currentVersion the current state version
 * @param requestedVersion the version to which the refresh is progressing
 */
void refreshStarted(long currentVersion, long requestedVersion);
/**
 * This method is called when either data was initialized for the first time, <i>or</i> an update occurred across a
 * discontinuous delta chain (double snapshot).
 * <p>
 * If this method is called, it means that the current refresh consists of a snapshot load, plus zero or more deltas.
 * <p>
 * Implementations may initialize (or re-initialize) any indexing which is critical to keep in-sync with the data.
 * <p>
 * This method will be called a maximum of once per refresh, after the data has reached the final state of the refresh.
 *
 * @param api the {@link HollowAPI} instance
 * @param stateEngine the {@link HollowReadStateEngine}
 * @param version the current state version
 * @throws Exception thrown if an error occurs in processing
 */
void snapshotUpdateOccurred(HollowAPI api, HollowReadStateEngine stateEngine, long version) throws Exception;
/**
 * This method is called whenever a live state engine's data is updated with a delta. This method is <i>not</i>
 * called during first time initialization or when an update across a discontinuous delta chain (double snapshot)
 * occurs.
 * <p>
 * Implementations should incrementally update any indexing which is critical to keep in-sync with the data.
 * <p>
 * If this method is called, it means that the current refresh consists of one or more deltas, and does not include
 * a snapshot load.
 * <p>
 * This method may be called multiple times per refresh, once for each time a delta is applied.
 *
 * @param api the {@link HollowAPI} instance
 * @param stateEngine the {@link HollowReadStateEngine}
 * @param version the current state version
 * @throws Exception thrown if an error occurs in processing
 */
void deltaUpdateOccurred(HollowAPI api, HollowReadStateEngine stateEngine, long version) throws Exception;
/**
 * Called to indicate a blob was loaded (either a snapshot or delta). Generally useful for logging or tracing of applied updates.
 *
 * @param transition The transition which was applied.
 */
void blobLoaded(HollowConsumer.Blob transition);
/**
 * Indicates that a refresh completed successfully.
 *
 * @param beforeVersion - The version when the refresh started
 * @param afterVersion - The version when the refresh completed
 * @param requestedVersion - The specific version which was requested
 */
void refreshSuccessful(long beforeVersion, long afterVersion, long requestedVersion);
/**
 * Indicates that a refresh failed with an Exception.
 *
 * @param beforeVersion - The version when the refresh started
 * @param afterVersion - The version when the refresh failed
 * @param requestedVersion - The specific version which was requested
 * @param failureCause - The Exception which caused the failure.
 */
void refreshFailed(long beforeVersion, long afterVersion, long requestedVersion, Throwable failureCause);
}
/**
 * A {@link RefreshListener} that additionally receives a callback for <i>every</i>
 * transition (snapshot or delta) applied during a refresh, plus the planned
 * transition sequence before execution begins.
 */
public interface TransitionAwareRefreshListener extends RefreshListener {
/**
 * This method is called <i>whenever</i> a snapshot is processed. In the case of first time initialization or an update
 * across a discontinuous delta chain (double snapshot), this method will be called once (as the first transition).
 * <p>
 * Implementations may initialize (or re-initialize) any indexing which is critical to keep in-sync with the data.
 *
 * @param api the {@link HollowAPI} instance
 * @param stateEngine the {@link HollowReadStateEngine}
 * @param version the current state version
 * @throws Exception thrown if an error occurs in processing
 */
void snapshotApplied(HollowAPI api, HollowReadStateEngine stateEngine, long version) throws Exception;
/**
 * This method is called <i>whenever</i> a delta is processed. In the case of first time initialization or an update
 * across a discontinuous delta chain (double snapshot), this method may be called one or more times before arriving
 * at the final state (after which {@link #snapshotUpdateOccurred(HollowAPI, HollowReadStateEngine, long)} is called).
 * <p>
 * Implementations may incrementally update any indexing which is critical to keep in-sync with the data.
 *
 * @param api the {@link HollowAPI} instance
 * @param stateEngine the {@link HollowReadStateEngine}
 * @param version the current state version
 * @throws Exception thrown if an error occurs in processing
 */
void deltaApplied(HollowAPI api, HollowReadStateEngine stateEngine, long version) throws Exception;
/**
 * Called after refresh started and update plan has been initialized, but before the update plan starts executing.
 * It is called only once per update plan (and thus only once per consumer refresh). Exposes details of the
 * update plan.
 * @implSpec The default implementation provided does nothing.
 *
 * @param beforeVersion The version when refresh started
 * @param desiredVersion The version that the consumer refresh tries update to, even though it might not be attainable eg. HollowConstants.VERSION_LATEST
 * @param isSnapshotPlan Indicates whether the refresh involves a snapshot transition
 * @param transitionSequence List of transitions comprising the refresh
 */
default void transitionsPlanned(long beforeVersion, long desiredVersion, boolean isSnapshotPlan, List<HollowConsumer.Blob.BlobType> transitionSequence) {}
}
/**
 * A listener of refresh listener addition and removal.
 * <p>
 * A {@link RefreshListener} implementation may implement this interface to get notified before
 * the listener is added (via a call to {@link #addRefreshListener(RefreshListener)}) and after a listener
 * is removed (via a call to {@link #removeRefreshListener(RefreshListener)}).
 * <p>
 * An implementation should not add or remove itself in response to addition or removal. Such actions may result
 * in a {@link StackOverflowError} or unspecified behaviour.
 */
public interface RefreshRegistrationListener {
/**
 * Called before the refresh listener is added.
 * @param c the consumer the associated reference listener is being added to
 */
void onBeforeAddition(HollowConsumer c);
/**
 * Called after the refresh listener is removed.
 * @param c the consumer the associated reference listener is being removed from
 */
void onAfterRemoval(HollowConsumer c);
}
/**
 * Convenience no-op implementation of {@link TransitionAwareRefreshListener}.
 * Subclass this and override only the callbacks of interest.
 */
public static class AbstractRefreshListener implements TransitionAwareRefreshListener {
@Override
public void refreshStarted(long currentVersion, long requestedVersion) {
// no-op
}
@Override
public void transitionsPlanned(long beforeVersion, long desiredVersion, boolean isSnapshotPlan, List<HollowConsumer.Blob.BlobType> transitionSequence) {
// no-op
}
@Override
public void snapshotUpdateOccurred(HollowAPI api, HollowReadStateEngine stateEngine, long version) throws Exception {
// no-op
}
@Override
public void deltaUpdateOccurred(HollowAPI api, HollowReadStateEngine stateEngine, long version) throws Exception {
// no-op
}
@Override
public void blobLoaded(Blob transition) {
// no-op
}
@Override
public void refreshSuccessful(long beforeVersion, long afterVersion, long requestedVersion) {
// no-op
}
@Override
public void refreshFailed(long beforeVersion, long afterVersion, long requestedVersion, Throwable failureCause) {
// no-op
}
@Override
public void snapshotApplied(HollowAPI api, HollowReadStateEngine stateEngine, long version) throws Exception {
// no-op
}
@Override
public void deltaApplied(HollowAPI api, HollowReadStateEngine stateEngine, long version) throws Exception {
// no-op
}
}
/**
 * @param <B> the concrete builder type, allowing subclass builders to chain fluently
 * @return a new fluent {@link Builder} for constructing a {@link HollowConsumer}
 */
public static <B extends HollowConsumer.Builder<B>> HollowConsumer.Builder<B> newHollowConsumer() {
return new Builder<>();
}
/**
 * Convenience factory equivalent to {@code newHollowConsumer().withBlobRetriever(...)}.
 *
 * @param blobRetriever the blob retriever the new builder will use
 * @return a fresh builder pre-configured with the given blob retriever
 */
public static HollowConsumer.Builder<?> withBlobRetriever(HollowConsumer.BlobRetriever blobRetriever) {
    return new Builder<>().withBlobRetriever(blobRetriever);
}
/**
 * Creates a builder pre-configured with a local blob store directory.
 *
 * @param localBlobStoreDir the directory used to cache blobs
 * @return a fresh builder pre-configured with the given local blob store
 * @deprecated use {@link #newHollowConsumer()}, i.e. {@code newHollowConsumer().withLocalBlobStore(...)}
 */
@Deprecated // was missing: the javadoc @deprecated tag alone emits no compile-time warning to callers
public static HollowConsumer.Builder<?> withLocalBlobStore(File localBlobStoreDir) {
    HollowConsumer.Builder<?> builder = new Builder<>();
    return builder.withLocalBlobStore(localBlobStoreDir);
}
@SuppressWarnings("unchecked")
@PublicSpi
public static class Builder<B extends HollowConsumer.Builder<B>> {
// Required: how snapshot/delta blobs are fetched.
protected HollowConsumer.BlobRetriever blobRetriever = null;
// Optional: watches for announced versions; when present, explicit triggerRefreshTo is unsupported.
protected HollowConsumer.AnnouncementWatcher announcementWatcher = null;
/**
 * @deprecated subclasses should use {@code typeFilter}
 */
@Deprecated
protected HollowFilterConfig filterConfig = null; // retained for binary compat
// Filter limiting which types are loaded; set via withTypeFilter/withFilterConfig.
protected TypeFilter typeFilter = null;
protected List<HollowConsumer.RefreshListener> refreshListeners = new ArrayList<>();
protected HollowAPIFactory apiFactory = HollowAPIFactory.DEFAULT_FACTORY;
protected HollowObjectHashCodeFinder hashCodeFinder = new DefaultHashCodeFinder();
protected HollowConsumer.DoubleSnapshotConfig doubleSnapshotConfig = DoubleSnapshotConfig.DEFAULT_CONFIG;
protected HollowConsumer.ObjectLongevityConfig objectLongevityConfig = ObjectLongevityConfig.DEFAULT_CONFIG;
protected HollowConsumer.ObjectLongevityDetector objectLongevityDetector = ObjectLongevityDetector.DEFAULT_DETECTOR;
// Optional on-disk blob cache; see withLocalBlobStore.
protected File localBlobStoreDir = null;
protected boolean useExistingStaleSnapshot;
// Executor used for async refreshes; a default is presumably supplied at build time — TODO confirm.
protected Executor refreshExecutor = null;
protected MemoryMode memoryMode = MemoryMode.ON_HEAP;
protected HollowMetricsCollector<HollowConsumerMetrics> metricsCollector;
protected boolean skipTypeShardUpdateWithNoAdditions = false;
/**
 * Sets the {@link BlobRetriever} used to fetch snapshot and delta blobs.
 *
 * @param blobRetriever the blob retriever
 * @return this builder
 */
public B withBlobRetriever(HollowConsumer.BlobRetriever blobRetriever) {
this.blobRetriever = blobRetriever;
return (B)this;
}
/**
 * This is the same as {@link #withLocalBlobStore(File, boolean)} where the boolean argument
 * is set to {@code false}.
 *
 * @param localBlobStoreDir the directory used to store cached blobs
 * @return this builder
 * @see #withLocalBlobStore(File, boolean)
 */
public B withLocalBlobStore(File localBlobStoreDir) {
this.localBlobStoreDir = localBlobStoreDir;
return (B)this;
}
/**
 * String-path overload of {@link #withLocalBlobStore(File)}.
 *
 * @param localBlobStoreDir path of the directory used to store cached blobs
 * @return this builder
 * @see #withLocalBlobStore(File)
 */
public B withLocalBlobStore(String localBlobStoreDir) {
return withLocalBlobStore(new File(localBlobStoreDir));
}
/**
 * Provide a directory that will be used to cache blobs.
 *
 * When this is supplied, Hollow will look in this directory for a blob before falling back
 * to retrieving using the configured {@link HollowConsumer.BlobRetriever}.
 * If it does not find the blob in the configured local blob store directory, it will
 * retrieve the blob using the configured blobRetriever and then use it from there.
 * Note that cached files are never deleted from {@code localBlobStoreDir}, meaning if you
 * use this on a long-running instance you will eventually run out of disk space. As such, a
 * local blob store is better suited for local development, testing, or tooling purposes.
 *
 * @param localBlobStoreDir the directory used to store cached blobs. This will be created
 * if it does not already exist.
 * @param useExistingStaleSnapshot
 * IF this is true
 * AND Hollow is trying to retrieve a snapshot
 * AND it hasn't already cached a snapshot for that specific version
 * AND it has cached a snapshot of an older version
 * THEN Hollow will use the older cached snapshot instead of fetching the desired snapshot
 * @return this builder
 */
public B withLocalBlobStore(File localBlobStoreDir, boolean useExistingStaleSnapshot) {
this.localBlobStoreDir = localBlobStoreDir;
this.useExistingStaleSnapshot = useExistingStaleSnapshot;
return (B)this;
}
/**
 * String-path overload of {@link #withLocalBlobStore(File, boolean)}.
 *
 * @param localBlobStoreDir path of the directory used to store cached blobs
 * @param useExistingStaleSnapshot whether an older cached snapshot may substitute for the desired one
 * @return this builder
 * @see #withLocalBlobStore(File, boolean)
 */
public B withLocalBlobStore(String localBlobStoreDir, boolean useExistingStaleSnapshot) {
return withLocalBlobStore(new File(localBlobStoreDir), useExistingStaleSnapshot);
}
/**
 * Sets the {@link AnnouncementWatcher} used to track announced versions.
 * Note that a consumer with a watcher does not support {@code triggerRefreshTo}.
 *
 * @param announcementWatcher the announcement watcher
 * @return this builder
 */
public B withAnnouncementWatcher(HollowConsumer.AnnouncementWatcher announcementWatcher) {
this.announcementWatcher = announcementWatcher;
return (B)this;
}
/**
 * Registers a single {@link RefreshListener} with the consumer being built.
 *
 * @param refreshListener the listener to register
 * @return this builder
 */
public B withRefreshListener(HollowConsumer.RefreshListener refreshListener) {
refreshListeners.add(refreshListener);
return (B)this;
}
/**
 * Registers several {@link RefreshListener}s with the consumer being built.
 *
 * @param refreshListeners the listeners to register
 * @return this builder
 */
public B withRefreshListeners(HollowConsumer.RefreshListener... refreshListeners) {
Collections.addAll(this.refreshListeners, refreshListeners);
return (B)this;
}
/**
 * Provide the code generated API class that extends {@link HollowAPI}, enabling object
 * caching for one or more types.
 *
 * Hollow record instances come from one of two factories:
 *
 * <dl>
 * <dt>{@link com.netflix.hollow.api.objects.provider.HollowObjectFactoryProvider}</dt>
 * <dd>creates the corresponding {@code <Type>DelegateLookupImpl} (e.g.
 * {@code StringDelegateLookupImpl} or {@code MovieDelegateLookupImpl} for core
 * types or generated-API types respectively); each field access performs a lookup
 * into the underlying high-density cache</dd>
 *
 * <dt>{@link com.netflix.hollow.api.objects.provider.HollowObjectCacheProvider}</dt>
 * <dd>instantiates and caches the corresponding {@code <Type>DelegateCachedImpl} from
 * the generated client API (e.g. {@code MovieDelegateCachedImpl}). For a given ordinal,
 * the same {@code HollowRecord} instance is returned assuming the ordinal hasn't been removed.
 * All of the type's fields are eagerly read from the high-density cache and stored as Java
 * fields, making field access in hot code paths more CPU efficient.</dd>
 * </dl>
 *
 * Object caching should only be enabled for low cardinality, custom types in your data model.
 *
 * Use {@link #withGeneratedAPIClass(Class)} to build a consumer with your custom client API
 * and the default high-density cache for all types.
 *
 * @param generatedAPIClass the code generated API class
 * @param cachedType the type to enable object caching on
 * @param additionalCachedTypes more types to enable object caching on
 *
 * @return this builder
 *
 * @see <a href="https://hollow.how/advanced-topics/#caching">https://hollow.how/advanced-topics/#caching</a>
 */
public B withGeneratedAPIClass(Class<? extends HollowAPI> generatedAPIClass,
        String cachedType,
        String... additionalCachedTypes) {
    // Reject the base class; a concrete generated subclass is required.
    if (HollowAPI.class.equals(generatedAPIClass))
        throw new IllegalArgumentException("must provide a code generated API class");
    Class<? extends HollowAPI> apiClass =
            Objects.requireNonNull(generatedAPIClass, "API class cannot be null");
    Objects.requireNonNull(additionalCachedTypes, "null detected for varargs parameter additionalCachedTypes");
    // Merge the required first type with the varargs into one array.
    String[] cachedTypes = new String[additionalCachedTypes.length + 1];
    cachedTypes[0] = cachedType;
    System.arraycopy(additionalCachedTypes, 0, cachedTypes, 1, additionalCachedTypes.length);
    // Record the index of every null entry so the error message can report all of them at once.
    BitSet nulls = new BitSet(cachedTypes.length);
    int index = 0;
    for (String type : cachedTypes) {
        if (type == null)
            nulls.set(index);
        index++;
    }
    if (!nulls.isEmpty())
        throw new NullPointerException("cached types cannot be null; argsWithNull=" + nulls);
    this.apiFactory = new HollowAPIFactory.ForGeneratedAPI<>(apiClass, cachedTypes);
    return (B) this;
}
/**
 * Provide the code generated API class that extends {@link HollowAPI}.
 *
 * The instance returned from {@link HollowConsumer#getAPI()} will be of the provided type and can be cast
 * to access generated methods.
 *
 * @param generatedAPIClass the code generated API class
 * @return this builder
 * @throws IllegalArgumentException if provided API class is {@code HollowAPI} instead of a subclass
 */
public B withGeneratedAPIClass(Class<? extends HollowAPI> generatedAPIClass) {
// Reject the base class; a concrete generated subclass is required.
if (HollowAPI.class.equals(generatedAPIClass))
throw new IllegalArgumentException("must provide a code generated API class");
this.apiFactory = new HollowAPIFactory.ForGeneratedAPI<>(generatedAPIClass);
return (B)this;
}
/**
 * <p>Specifies a filter config. Clears any type filter specified by {@code withTypeFilter(...)}.</p>
 *
 * <p>{@link HollowFilterConfig} is deprecated in favor of {@link TypeFilter}.</p>
 *
 * <p>{@code HollowFilterConfig} has these limitations:</p>
 *
 * <ul>
 * <li>cannot mix inclusions and exclusions in a single filter and cannot compose filters</li>
 * <li>recursive actions requires that callers already have the dataset's schema, leading to
 * a chicken-and-egg situation</li>
 * </ul>
 *
 * @param filterConfig the filter config to apply
 * @return this builder
 * @see #withTypeFilter(TypeFilter)
 * @see #withTypeFilter(UnaryOperator)
 * @deprecated use {@link #withTypeFilter(TypeFilter)} or {@link #withTypeFilter(UnaryOperator)}
 */
@Deprecated
public B withFilterConfig(HollowFilterConfig filterConfig) {
// HollowFilterConfig is itself a TypeFilter, so it is stored in the same slot.
this.typeFilter = filterConfig;
return (B)this;
}
/**
* <p>Specifies the type filter. Clears any filter config specified by {@code withFilterConfig()}</p>
*
* <p>{@link TypeFilter} is the replacement API for {@link HollowFilterConfig}. It improves upon its
* limitations, but lacks the ability to serialize/deserialize like {@code HollowFilterConfig} has.</p>
*
* @param typeFilter
* @return this builder
* @see #withTypeFilter(UnaryOperator)
* @see #withFilterConfig(HollowFilterConfig)
*/
public B withTypeFilter(TypeFilter typeFilter) {
this.typeFilter = typeFilter;
return (B)this;
}
/**
* <p>Configures a new type filter on this consumer using a fluent API. Works as if by
* calling {@code withTypeFilter(op.apply(newTypeFilter())}. For example:</p>
*
* <pre>{@code
* consumerBuilder
* ...
* .withTypeFilter(filter -> filter
* .excludeAll()
* .includeRecursive("Alpha"))
* .build();
* }</pre>
*
* <p>Clears any filter config specified by {@code withFilterConfig(...)}.</p>
*
* <p>{@link TypeFilter} is the replacement API for {@link HollowFilterConfig}. It improves upon its
* limitations, but lacks the ability to serialize/deserialize like {@code HollowFilterConfig} has.</p>
*
* @param op
* @return this builder
* @see #withTypeFilter(TypeFilter)
* @see #withFilterConfig(HollowFilterConfig)
*/
public B withTypeFilter(UnaryOperator<TypeFilter.Builder> op) {
TypeFilter.Builder builder = op.apply(TypeFilter.newTypeFilter());
return withTypeFilter(builder.build());
}
public B withDoubleSnapshotConfig(HollowConsumer.DoubleSnapshotConfig doubleSnapshotConfig) {
this.doubleSnapshotConfig = doubleSnapshotConfig;
return (B)this;
}
public B withObjectLongevityConfig(HollowConsumer.ObjectLongevityConfig objectLongevityConfig) {
this.objectLongevityConfig = objectLongevityConfig;
return (B)this;
}
public B withObjectLongevityDetector(HollowConsumer.ObjectLongevityDetector objectLongevityDetector) {
this.objectLongevityDetector = objectLongevityDetector;
return (B)this;
}
public B withRefreshExecutor(Executor refreshExecutor) {
this.refreshExecutor = refreshExecutor;
return (B)this;
}
/**
* Experimental: By default the Hollow dataset is eagerly loaded to heap, but shared memory mode allows mmap-ing
* the dataset to virtual memory and only loading specific pages to main memory.
*
* This allows faster init and support for datasets larger that available physical memory. However, shared memory
* implementation is not suitable for production use given its limited functionality (no delta refreshes, no
* un-mmap of previous version, feature gaps like indexes continue to live on heap) and limited production hardening.
*/
public B withMemoryMode(MemoryMode memoryMode) {
this.memoryMode = memoryMode;
return (B)this;
}
public B withMetricsCollector(HollowMetricsCollector<HollowConsumerMetrics> metricsCollector) {
this.metricsCollector = metricsCollector;
return (B)this;
}
/**
* Experimental: When there are no updates for a type shard in a delta, skip updating that type shard.
*/
public B withSkipTypeShardUpdateWithNoAdditions() {
this.skipTypeShardUpdateWithNoAdditions = true;
return (B)this;
}
@Deprecated
public B withHashCodeFinder(HollowObjectHashCodeFinder hashCodeFinder) {
this.hashCodeFinder = hashCodeFinder;
return (B)this;
}
        /**
         * Validates the builder configuration and applies defaults before construction.
         *
         * NOTE(review): despite the name, this method also mutates builder state — it wraps
         * {@code blobRetriever} in a filesystem-backed retriever when a local blob store
         * directory is configured, and installs a default single-threaded daemon
         * {@code refreshExecutor} when none was supplied. {@code build()} relies on these
         * side effects having run first.
         *
         * @throws IllegalStateException if both a filter config and a type filter were set
         *         (only possible via subclasses overriding {@code withFilterConfig(...)})
         * @throws IllegalArgumentException if neither a blob retriever nor a local blob
         *         store directory was specified
         * @throws UnsupportedOperationException if the configured memory mode is unsupported,
         *         or filtering was requested in a memory mode that does not support it
         */
        protected void checkArguments() {
            if (filterConfig != null && typeFilter != null) {
                // this should only be possible in custom subclasses that override #withFilterConfig(...)
                throw new IllegalStateException("Only one of typeFilter and filterConfig can be set");
            }
            if (blobRetriever == null && localBlobStoreDir == null) {
                throw new IllegalArgumentException(
                        "A HollowBlobRetriever or local blob store directory must be specified when building a HollowClient");
            }
            // If a local blob store directory is set, layer a filesystem retriever over any
            // configured remote retriever (which may be null, making the filesystem the sole source).
            BlobRetriever blobRetriever = this.blobRetriever;
            if (localBlobStoreDir != null) {
                this.blobRetriever = new HollowFilesystemBlobRetriever(
                        localBlobStoreDir.toPath(), blobRetriever, useExistingStaleSnapshot);
            }
            // Default executor: one daemon thread dedicated to refreshes.
            if (refreshExecutor == null) {
                refreshExecutor = newSingleThreadExecutor(r -> daemonThread(r, getClass(), "refresh"));
            }
            if (!memoryMode.consumerSupported()) {
                throw new UnsupportedOperationException("Cinder Consumer in " + memoryMode + " mode is not supported");
            }
            if ((filterConfig != null || typeFilter != null) && !memoryMode.supportsFiltering()) {
                throw new UnsupportedOperationException("Filtering is not supported in shared memory mode");
            }
        }
        /**
         * Validates the configuration (applying defaults via {@link #checkArguments()}) and
         * constructs the {@link HollowConsumer}.
         *
         * Any filter set through the deprecated {@code withFilterConfig(...)} path is folded
         * into {@code typeFilter} here so the consumer only ever sees a single filter field.
         *
         * @return the constructed consumer
         */
        public HollowConsumer build() {
            checkArguments();
            if (filterConfig != null) {
                // TODO: remove once deprecated #withFilterConfig is removed
                typeFilter = filterConfig;
                filterConfig = null;
            }
            return new HollowConsumer((B)this);
        }
}
}
| 9,341 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/consumer | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/consumer/metrics/UpdatePlanDetails.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.consumer.metrics;
import com.netflix.hollow.api.consumer.HollowConsumer.Blob.BlobType;
import java.util.List;
/**
* A class that contains details of the consumer refresh update plan that may be useful to report as metrics or logs.
* These details are computed in {@code AbstractRefreshMetricsListener} during execution of the update plan.
*/
public class UpdatePlanDetails {
    // Package-private fields; per the class javadoc these are populated in
    // AbstractRefreshMetricsListener during execution of the update plan.
    long beforeVersion;
    long desiredVersion;
    List<BlobType> transitionSequence;
    int numSuccessfulTransitions;

    /** @return the version the consumer was at when the refresh started */
    public long getBeforeVersion() {
        return beforeVersion;
    }

    /** @return the version the refresh attempted to reach */
    public long getDesiredVersion() {
        return desiredVersion;
    }

    /** @return the sequence of blob transitions comprising the refresh */
    public List<BlobType> getTransitionSequence() {
        return transitionSequence;
    }

    /** @return the number of transitions that completed successfully */
    public int getNumSuccessfulTransitions() {
        return numSuccessfulTransitions;
    }
}
| 9,342 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/consumer | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/consumer/metrics/RefreshMetricsReporting.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.consumer.metrics;
/**
* An interface to facilitate reporting of Hollow Consumer refresh metrics.
* <p>
* At different stages of Hollow Consumer refresh for eg. refresh start and refresh end, the methods in this interface
* are called with computed metrics. Hollow consumers can implement custom metrics reporting behavior by implementing
* these methods.
*/
public interface RefreshMetricsReporting {

    /**
     * Reports the metrics gathered for one consumer refresh. Called at the end of every
     * refresh, whether the refresh succeeded or failed, allowing implementations to plug in
     * custom reporting behavior (counters, timers, logs, etc.).
     *
     * @param refreshMetrics Object containing refresh metrics such as duration, consecutive failures, etc.
     *
     * @see com.netflix.hollow.api.consumer.metrics.ConsumerRefreshMetrics
     */
    void refreshEndMetricsReporting(ConsumerRefreshMetrics refreshMetrics);
}
| 9,343 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/consumer | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/consumer/metrics/AbstractRefreshMetricsListener.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.consumer.metrics;
import static com.netflix.hollow.core.HollowConstants.VERSION_NONE;
import static com.netflix.hollow.core.HollowStateEngine.HEADER_TAG_METRIC_ANNOUNCEMENT;
import static com.netflix.hollow.core.HollowStateEngine.HEADER_TAG_METRIC_CYCLE_START;
import com.netflix.hollow.api.consumer.HollowConsumer;
import com.netflix.hollow.api.consumer.HollowConsumer.AbstractRefreshListener;
import com.netflix.hollow.api.consumer.HollowConsumer.Blob.BlobType;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.OptionalLong;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* A class for computing Hollow Consumer refresh metrics, requires plugging in metrics reporting implementation.
* <p>
* This class computes Hollow Consumer refresh metrics by listening to refresh events. At the end of every refresh, whether
* the refresh succeeded or failed, the refresh metrics in {@code ConsumerRefreshMetrics} and refresh details in {@code UpdatePlanDetails}
 * are reported using the {@code RefreshMetricsReporting} interface. This interface makes it mandatory for concrete subclasses
* to implement custom metrics reporting behavior.
*/
public abstract class AbstractRefreshMetricsListener extends AbstractRefreshListener implements RefreshMetricsReporting {
private static final Logger log = Logger.getLogger(AbstractRefreshMetricsListener.class.getName());
private OptionalLong lastRefreshTimeNanoOptional;
private long refreshStartTimeNano;
private long consecutiveFailures;
private BlobType overallRefreshType; // Indicates whether the overall refresh (that could comprise of multiple transitions)
// is classified as snapshot, delta, or reverse delta. Note that if a snapshot
// transition is present then the overall refresh type is snapshot.
private ConsumerRefreshMetrics.UpdatePlanDetails updatePlanDetails; // Some details about the transitions comprising a refresh
// visible for testing
ConsumerRefreshMetrics.Builder refreshMetricsBuilder;
private final Map<Long, Long> cycleVersionStartTimes;
private final Map<Long, Long> announcementTimestamps;
private volatile boolean namespacePinnedPreviously;
public AbstractRefreshMetricsListener() {
lastRefreshTimeNanoOptional = OptionalLong.empty();
consecutiveFailures = 0l;
cycleVersionStartTimes = new HashMap<>();
announcementTimestamps = new HashMap<>();
namespacePinnedPreviously = false;
}
public void refreshStarted(long currentVersion, long requestedVersion) {
updatePlanDetails = new ConsumerRefreshMetrics.UpdatePlanDetails();
refreshStartTimeNano = System.nanoTime();
refreshMetricsBuilder = new ConsumerRefreshMetrics.Builder();
refreshMetricsBuilder.setIsInitialLoad(currentVersion == VERSION_NONE);
refreshMetricsBuilder.setUpdatePlanDetails(updatePlanDetails);
cycleVersionStartTimes.clear(); // clear map to avoid accumulation over time
}
@Override
public void versionDetected(HollowConsumer.VersionInfo requestedVersionInfo) {
announcementTimestamps.clear(); // clear map to avoid accumulation over time
if (requestedVersionInfo.isPinned() == null || requestedVersionInfo.getAnnouncementMetadata() == null) {
return;
}
if (!(requestedVersionInfo.isPinned().isPresent() && requestedVersionInfo.getAnnouncementMetadata().isPresent())) {
return;
}
boolean isPinned = requestedVersionInfo.isPinned().get();
// Track the version to announcement timestamp only when the namespace is not pinned (either in previous cycle
// or for the newVersion). Don't record this metric when a namespace was pinned previously and gets unpinned
// in the next cycle because this metric will record the refresh duration from the latest announced version.
if (!(namespacePinnedPreviously || isPinned)) {
trackTimestampsFromHeaders(requestedVersionInfo.getVersion(),
requestedVersionInfo.getAnnouncementMetadata().get(), HEADER_TAG_METRIC_ANNOUNCEMENT, announcementTimestamps);
}
namespacePinnedPreviously = isPinned;
}
/**
* This method acquires details of individual transitions that comprise a larger refresh.
* <p>
* Details of transitions in a refresh such as count and type can be useful to understand consumer performance and
* to troubleshoot issues relating to refresh failure.
* </p>
* @param beforeVersion The version when refresh started
* @param desiredVersion The version that the consumer refresh tries update to, even though it might not be attainable eg. HollowConstants.VERSION_LATEST
* @param isSnapshotPlan Indicates whether the refresh involves a snapshot transition
* @param transitionSequence List of transitions comprising the refresh
*/
@Override
public void transitionsPlanned(long beforeVersion, long desiredVersion, boolean isSnapshotPlan, List<HollowConsumer.Blob.BlobType> transitionSequence) {
updatePlanDetails.beforeVersion = beforeVersion;
updatePlanDetails.desiredVersion = desiredVersion;
updatePlanDetails.transitionSequence = transitionSequence;
if (isSnapshotPlan) {
overallRefreshType = BlobType.SNAPSHOT;
} else {
overallRefreshType = desiredVersion > beforeVersion ? BlobType.DELTA : BlobType.REVERSE_DELTA;
}
refreshMetricsBuilder.setOverallRefreshType(overallRefreshType);
}
@Override
public void blobLoaded(HollowConsumer.Blob transition) {
updatePlanDetails.numSuccessfulTransitions ++;
}
/**
* Metrics reporting implementation is provided by the extending subclass. If exceptions are not gracefully handled
* in the extending subclass then an exception there can fail the consumer refresh, even though metrics reporting
* might not be mission critical. This method protects against that scenario by catching all exceptions, logging
* that there was an exception, and continuing with the consumer refresh.
* @param refreshMetrics Consumer refresh metrics being reported
*/
private final void noFailRefreshEndMetricsReporting(ConsumerRefreshMetrics refreshMetrics) {
try {
refreshEndMetricsReporting(refreshMetrics);
} catch (Exception e) {
// Metric reporting is not considered critical to consumer refresh. Log exceptions and continue.
log.log(Level.SEVERE, "Encountered an exception in reporting consumer refresh metrics, ignoring exception "
+ "and continuing with consumer refresh", e);
}
}
@Override
public void refreshSuccessful(long beforeVersion, long afterVersion, long requestedVersion) {
long refreshEndTimeNano = System.nanoTime();
long durationMillis = TimeUnit.NANOSECONDS.toMillis(refreshEndTimeNano - refreshStartTimeNano);
consecutiveFailures = 0l;
lastRefreshTimeNanoOptional = OptionalLong.of(refreshEndTimeNano);
refreshMetricsBuilder.setDurationMillis(durationMillis)
.setIsRefreshSuccess(true)
.setConsecutiveFailures(consecutiveFailures)
.setRefreshSuccessAgeMillisOptional(0l)
.setRefreshEndTimeNano(refreshEndTimeNano);
if (cycleVersionStartTimes.containsKey(afterVersion)) {
refreshMetricsBuilder.setCycleStartTimestamp(cycleVersionStartTimes.get(afterVersion));
}
if (afterVersion == requestedVersion && announcementTimestamps.containsKey(afterVersion)) {
refreshMetricsBuilder.setAnnouncementTimestamp(announcementTimestamps.get(afterVersion));
}
noFailRefreshEndMetricsReporting(refreshMetricsBuilder.build());
}
@Override
public void refreshFailed(long beforeVersion, long afterVersion, long requestedVersion, Throwable failureCause) {
long refreshEndTimeNano = System.nanoTime();
long durationMillis = TimeUnit.NANOSECONDS.toMillis(refreshEndTimeNano - refreshStartTimeNano);
consecutiveFailures ++;
refreshMetricsBuilder.setDurationMillis(durationMillis)
.setIsRefreshSuccess(false)
.setConsecutiveFailures(consecutiveFailures)
.setRefreshEndTimeNano(refreshEndTimeNano);
if (lastRefreshTimeNanoOptional.isPresent()) {
refreshMetricsBuilder.setRefreshSuccessAgeMillisOptional(TimeUnit.NANOSECONDS.toMillis(
refreshEndTimeNano - lastRefreshTimeNanoOptional.getAsLong()));
}
if (cycleVersionStartTimes.containsKey(afterVersion)) {
refreshMetricsBuilder.setCycleStartTimestamp(cycleVersionStartTimes.get(afterVersion));
}
noFailRefreshEndMetricsReporting(refreshMetricsBuilder.build());
}
@Override
public void snapshotUpdateOccurred(HollowAPI refreshAPI, HollowReadStateEngine stateEngine, long version) {
trackTimestampsFromHeaders(version, stateEngine.getHeaderTags(), HEADER_TAG_METRIC_CYCLE_START, cycleVersionStartTimes);
}
@Override
public void deltaUpdateOccurred(HollowAPI refreshAPI, HollowReadStateEngine stateEngine, long version) {
trackTimestampsFromHeaders(version, stateEngine.getHeaderTags(), HEADER_TAG_METRIC_CYCLE_START, cycleVersionStartTimes);
}
/**
* If the blob header contains the timestamps like producer cycle start and announcement then save those values in
* the maps tracking version to cycle start time and version to announcement respectively.
*/
private void trackTimestampsFromHeaders(long version, Map<String, String> headers, String headerTag, Map<Long, Long> timestampsMap) {
if (headers != null) {
String headerTagValue = headers.get(headerTag);
if (headerTagValue != null && !headerTagValue.isEmpty()) {
try {
Long timestamp = Long.valueOf(headerTagValue);
if (timestamp != null) {
timestampsMap.put(version, timestamp);
}
} catch (NumberFormatException e) {
log.log(Level.WARNING, "Blob header contained " + headerTag + " but its value could"
+ "not be parsed as a long. Consumer metrics relying on this tag will be unreliable.", e);
}
}
}
}
@Override
public void snapshotApplied(HollowAPI api, HollowReadStateEngine stateEngine, long version) throws Exception {
// no-op
}
@Override
public void deltaApplied(HollowAPI api, HollowReadStateEngine stateEngine, long version) throws Exception {
// no-op
}
} | 9,344 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/consumer | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/consumer/metrics/ConsumerRefreshMetrics.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.consumer.metrics;
import com.netflix.hollow.api.consumer.HollowConsumer.Blob.BlobType;
import java.util.List;
import java.util.OptionalLong;
public class ConsumerRefreshMetrics {
private long durationMillis;
private boolean isRefreshSuccess; // true if refresh was successful, false if refresh failed
private boolean isInitialLoad; // true if initial load, false if subsequent refresh
private BlobType overallRefreshType; // snapshot, delta, or reverse delta
private UpdatePlanDetails updatePlanDetails; // details about the update plan such as no. and types of transitions and no. of successful transitions
private long consecutiveFailures;
private OptionalLong refreshSuccessAgeMillisOptional; // time elapsed since the previous successful refresh
private long refreshEndTimeNano; // monotonic system time when refresh ended
private OptionalLong cycleStartTimestamp; // timestamp in millis of when cycle started for the loaded data version
private OptionalLong announcementTimestamp; // timestamp in milliseconds to mark announcement for the loaded data version
/**
* A class that contains details of the consumer refresh update plan that may be useful to report as metrics or logs.
* These details are computed in {@code AbstractRefreshMetricsListener} during execution of the update plan.
*/
public static class UpdatePlanDetails {
long beforeVersion;
long desiredVersion;
List<BlobType> transitionSequence;
int numSuccessfulTransitions;
public long getBeforeVersion() {
return beforeVersion;
}
public long getDesiredVersion() {
return desiredVersion;
}
public List<BlobType> getTransitionSequence() {
return transitionSequence;
}
public int getNumSuccessfulTransitions() {
return numSuccessfulTransitions;
}
}
public long getDurationMillis() {
return durationMillis;
}
public boolean getIsRefreshSuccess() {
return isRefreshSuccess;
}
public boolean getIsInitialLoad() {
return isInitialLoad;
}
public BlobType getOverallRefreshType() {
return overallRefreshType;
}
public UpdatePlanDetails getUpdatePlanDetails() {
return updatePlanDetails;
}
public long getConsecutiveFailures() {
return consecutiveFailures;
}
public OptionalLong getRefreshSuccessAgeMillisOptional() {
return refreshSuccessAgeMillisOptional;
}
public long getRefreshEndTimeNano() {
return refreshEndTimeNano;
}
public OptionalLong getCycleStartTimestamp() {
return cycleStartTimestamp;
}
public OptionalLong getAnnouncementTimestamp() { return announcementTimestamp; }
private ConsumerRefreshMetrics(Builder builder) {
this.durationMillis = builder.durationMillis;
this.isRefreshSuccess = builder.isRefreshSuccess;
this.isInitialLoad = builder.isInitialLoad;
this.overallRefreshType = builder.overallRefreshType;
this.updatePlanDetails = builder.updatePlanDetails;
this.consecutiveFailures = builder.consecutiveFailures;
this.refreshSuccessAgeMillisOptional = builder.refreshSuccessAgeMillisOptional;
this.refreshEndTimeNano = builder.refreshEndTimeNano;
this.cycleStartTimestamp = builder.cycleStartTimestamp;
this.announcementTimestamp = builder.announcementTimestamp;
}
public static final class Builder {
private long durationMillis;
private boolean isRefreshSuccess;
private boolean isInitialLoad;
private BlobType overallRefreshType;
private UpdatePlanDetails updatePlanDetails;
private long consecutiveFailures;
private OptionalLong refreshSuccessAgeMillisOptional;
private long refreshEndTimeNano;
private OptionalLong cycleStartTimestamp;
private OptionalLong announcementTimestamp;
public Builder() {
refreshSuccessAgeMillisOptional = OptionalLong.empty();
cycleStartTimestamp = OptionalLong.empty();
announcementTimestamp = OptionalLong.empty();
}
public Builder setDurationMillis(long durationMillis) {
this.durationMillis = durationMillis;
return this;
}
public Builder setIsRefreshSuccess(boolean isRefreshSuccess) {
this.isRefreshSuccess = isRefreshSuccess;
return this;
}
public Builder setIsInitialLoad(boolean isInitialLoad) {
this.isInitialLoad = isInitialLoad;
return this;
}
public Builder setOverallRefreshType(BlobType overallRefreshType) {
this.overallRefreshType = overallRefreshType;
return this;
}
public Builder setUpdatePlanDetails(
UpdatePlanDetails updatePlanDetails) {
this.updatePlanDetails = updatePlanDetails;
return this;
}
public Builder setConsecutiveFailures(long consecutiveFailures) {
this.consecutiveFailures = consecutiveFailures;
return this;
}
public Builder setRefreshSuccessAgeMillisOptional(long refreshSuccessAgeMillis) {
this.refreshSuccessAgeMillisOptional = OptionalLong.of(refreshSuccessAgeMillis);
return this;
}
public Builder setRefreshEndTimeNano(long refreshEndTimeNano) {
this.refreshEndTimeNano = refreshEndTimeNano;
return this;
}
public Builder setCycleStartTimestamp(long cycleStartTimestamp) {
this.cycleStartTimestamp = OptionalLong.of(cycleStartTimestamp);
return this;
}
public Builder setAnnouncementTimestamp(long announcementTimestamp) {
this.announcementTimestamp = OptionalLong.of(announcementTimestamp);
return this;
}
public ConsumerRefreshMetrics build() {
return new ConsumerRefreshMetrics(this);
}
}
} | 9,345 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/consumer | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/consumer/index/AbstractHollowHashIndex.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.hollow.api.consumer.index;
import com.netflix.hollow.api.consumer.HollowConsumer;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.core.index.HollowHashIndex;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
/**
* Intended for internal use only - used by API code generator
* @deprecated see {@link HashIndex}
*/
// TODO(timt): how to move to `API extends HollowAPI` without binary incompatibility of access to the `api`
// field in generated subclasses, e.g. `find*Matches(...)`
@Deprecated
public abstract class AbstractHollowHashIndex<API> {
protected final HollowConsumer consumer;
protected final String queryType;
protected final String selectFieldPath;
protected final String matchFieldPaths[];
protected HollowHashIndex idx;
protected API api;
protected boolean isListenToDataRefresh;
protected RefreshListener refreshListener;
public AbstractHollowHashIndex(HollowConsumer consumer, boolean isListenToDataRefreah, String queryType, String selectFieldPath, String... matchFieldPaths) {
this.consumer = consumer;
this.queryType = queryType;
this.selectFieldPath = selectFieldPath;
this.matchFieldPaths = matchFieldPaths;
consumer.getRefreshLock().lock();
try {
this.api = castAPI(consumer.getAPI());
this.idx = new HollowHashIndex(consumer.getStateEngine(), queryType, selectFieldPath, matchFieldPaths);
this.refreshListener = new RefreshListener();
if (isListenToDataRefreah) {
listenToDataRefresh();
}
} catch(ClassCastException cce) {
throw new ClassCastException("The HollowConsumer provided was not created with the PackageErgoTestAPI generated API class.");
} finally {
consumer.getRefreshLock().unlock();
}
}
@SuppressWarnings("unchecked")
private API castAPI(HollowAPI api) {
return (API) api;
}
@Deprecated
public boolean isListenToDataRefreah() {
return isListenToDataRefresh;
}
@Deprecated
public void listenToDataRefreah() {
listenToDataRefresh();
}
public boolean isListenToDataRefresh() {
return isListenToDataRefresh;
}
public void listenToDataRefresh() {
if (isListenToDataRefresh) return;
isListenToDataRefresh = true;
idx.listenForDeltaUpdates();
consumer.addRefreshListener(refreshListener);
}
public void detachFromDataRefresh() {
isListenToDataRefresh = false;
idx.detachFromDeltaUpdates();
consumer.removeRefreshListener(refreshListener);
}
private class RefreshListener implements HollowConsumer.RefreshListener {
@Override
public void snapshotUpdateOccurred(HollowAPI refreshAPI, HollowReadStateEngine stateEngine, long version) {
idx.detachFromDeltaUpdates();
idx = new HollowHashIndex(stateEngine, queryType, selectFieldPath, matchFieldPaths);
idx.listenForDeltaUpdates();
api = castAPI(refreshAPI);
}
@Override
public void deltaUpdateOccurred(HollowAPI refreshAPI, HollowReadStateEngine stateEngine, long version) {
api = castAPI(refreshAPI);
}
@Override public void refreshStarted(long currentVersion, long requestedVersion) { }
@Override public void blobLoaded(HollowConsumer.Blob transition) { }
@Override public void refreshSuccessful(long beforeVersion, long afterVersion, long requestedVersion) { }
@Override public void refreshFailed(long beforeVersion, long afterVersion, long requestedVersion, Throwable failureCause) { }
}
} | 9,346 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/consumer | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/consumer/index/AbstractHollowUniqueKeyIndex.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.hollow.api.consumer.index;
import com.netflix.hollow.api.consumer.HollowConsumer;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.core.index.HollowPrimaryKeyIndex;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
/**
* Intended for internal use only - used by API code generator
 *
* @author dsu
*/
// TODO(timt): how to move to `API extends HollowAPI` without binary incompatibility of access to the `api`
// field in generated subclasses, e.g. `findMatches(...)`
public abstract class AbstractHollowUniqueKeyIndex<API, T> {
protected final HollowConsumer consumer;
protected HollowPrimaryKeyIndex idx;
protected API api;
protected boolean isListenToDataRefresh;
protected RefreshListener refreshListener;
public AbstractHollowUniqueKeyIndex(HollowConsumer consumer, String type, boolean isListenToDataRefresh, String... fieldPaths) {
consumer.getRefreshLock().lock();
try {
this.consumer = consumer;
this.api = castAPI(consumer.getAPI());
this.idx = new HollowPrimaryKeyIndex(consumer.getStateEngine(), type, fieldPaths);
this.refreshListener = new RefreshListener();
if (isListenToDataRefresh) {
listenToDataRefresh();
}
} catch (ClassCastException cce) {
throw new ClassCastException("The HollowConsumer provided was not created with the PackageErgoTestAPI generated API class.");
} finally {
consumer.getRefreshLock().unlock();
}
}
@SuppressWarnings("unchecked")
private API castAPI(HollowAPI api) {
return (API) api;
}
@Deprecated
public boolean isListenToDataRefreah() {
return isListenToDataRefresh;
}
@Deprecated
public void listenToDataRefreah() {
listenToDataRefresh();
}
public boolean isListenToDataRefresh() {
return isListenToDataRefresh;
}
public void listenToDataRefresh() {
if (isListenToDataRefresh) return;
isListenToDataRefresh = true;
idx.listenForDeltaUpdates();
consumer.addRefreshListener(refreshListener);
}
public void detachFromDataRefresh() {
isListenToDataRefresh = false;
idx.detachFromDeltaUpdates();
consumer.removeRefreshListener(refreshListener);
}
/**
 * Keeps the enclosing index current across consumer refreshes: replaces the API
 * handle on every update and rebuilds the primary key index on snapshots.
 */
private class RefreshListener implements HollowConsumer.RefreshListener {
    @Override
    public void snapshotUpdateOccurred(HollowAPI refreshAPI, HollowReadStateEngine stateEngine, long version) {
        // A snapshot yields a new read state engine; the existing index cannot be
        // updated incrementally, so detach it and rebuild against the new state
        // (reusing the same primary key definition).
        idx.detachFromDeltaUpdates();
        idx = new HollowPrimaryKeyIndex(stateEngine, idx.getPrimaryKey());
        idx.listenForDeltaUpdates();
        api = castAPI(refreshAPI);
    }
    @Override
    public void deltaUpdateOccurred(HollowAPI refreshAPI, HollowReadStateEngine stateEngine, long version) {
        // The index tracks deltas itself (listenForDeltaUpdates); only the API handle changes.
        api = castAPI(refreshAPI);
    }
    @Override public void refreshStarted(long currentVersion, long requestedVersion) { }
    @Override public void blobLoaded(HollowConsumer.Blob transition) { }
    @Override public void refreshSuccessful(long beforeVersion, long afterVersion, long requestedVersion) { }
    @Override public void refreshFailed(long beforeVersion, long afterVersion, long requestedVersion, Throwable failureCause) { }
}
} | 9,347 |
// File: com/netflix/hollow/api/consumer/index/HashIndexSelect.java
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.consumer.index;
import static java.util.stream.Collectors.joining;
import com.netflix.hollow.api.consumer.HollowConsumer;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.api.objects.HollowRecord;
import com.netflix.hollow.core.index.FieldPaths;
import com.netflix.hollow.core.index.HollowHashIndex;
import com.netflix.hollow.core.index.HollowHashIndexResult;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.write.objectmapper.HollowObjectTypeMapper;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.function.Function;
import java.util.stream.Stream;
/**
* A type safe hash index, with result selection, for indexing non-primary-key data.
* <p>
* This type of index can map multiple keys to a single matching record,
* and/or multiple records to a single key.
* <p>
* If the index is {@link HollowConsumer#addRefreshListener(HollowConsumer.RefreshListener) registered} with its
* associated {@link HollowConsumer} then the index will track updates and changes will be reflected in matched results
* (performed after such updates). When a registered index is no longer needed it should be
* {@link HollowConsumer#removeRefreshListener(HollowConsumer.RefreshListener) deregistered} to avoid unnecessary
* index recalculation and to ensure the index is reclaimed by the garbage collector.
*
* @param <T> the root type
* @param <S> the select and result type
* @param <Q> the query type
*/
public class HashIndexSelect<T extends HollowRecord, S extends HollowRecord, Q>
        implements HollowConsumer.RefreshListener, HollowConsumer.RefreshRegistrationListener, Function<Q, Stream<S>> {
    final HollowConsumer consumer;
    HollowAPI api; // replaced on snapshot/delta refreshes while registered as a listener
    final SelectFieldPathResultExtractor<S> selectField;
    final List<MatchFieldPathArgumentExtractor<Q>> matchFields;
    final String rootTypeName;
    final String selectFieldPath;
    final String[] matchFieldPaths;
    HollowHashIndex hhi; // rebuilt from scratch on snapshot refreshes

    HashIndexSelect(
            HollowConsumer consumer,
            Class<T> rootType,
            SelectFieldPathResultExtractor<S> selectField,
            List<MatchFieldPathArgumentExtractor<Q>> matchFields) {
        this.consumer = consumer;
        this.api = consumer.getAPI();
        this.selectField = selectField;
        this.matchFields = matchFields;

        // Validate select field path
        // @@@ Add method to FieldPath
        this.selectFieldPath = selectField.fieldPath.getSegments().stream().map(FieldPaths.FieldSegment::getName)
                .collect(joining("."));

        // Validate match field paths
        this.matchFieldPaths = matchFields.stream()
                // @@@ Add method to FieldPath
                .map(mf -> mf.fieldPath.getSegments().stream().map(FieldPaths.FieldSegment::getName)
                        .collect(joining(".")))
                .toArray(String[]::new);

        this.rootTypeName = HollowObjectTypeMapper.getDefaultTypeName(rootType);
        this.hhi = new HollowHashIndex(consumer.getStateEngine(), rootTypeName, selectFieldPath, matchFieldPaths);
    }

    // Match fields declared by @FieldPath annotated members of the holder class.
    HashIndexSelect(
            HollowConsumer consumer,
            Class<T> rootType,
            Class<S> selectType, String selectField,
            Class<Q> matchFieldsType) {
        this(consumer,
                rootType,
                SelectFieldPathResultExtractor
                        .from(consumer.getAPI().getClass(), consumer.getStateEngine(), rootType, selectField,
                                selectType),
                MatchFieldPathArgumentExtractor
                        .fromHolderClass(consumer.getStateEngine(), rootType, matchFieldsType,
                                FieldPaths::createFieldPathForHashIndex));
    }

    // Single match field declared by an explicit field path and type.
    HashIndexSelect(
            HollowConsumer consumer,
            Class<T> rootType,
            Class<S> selectType, String selectField,
            String fieldPath, Class<Q> matchFieldType) {
        this(consumer,
                rootType,
                SelectFieldPathResultExtractor
                        .from(consumer.getAPI().getClass(), consumer.getStateEngine(), rootType, selectField,
                                selectType),
                Collections.singletonList(
                        MatchFieldPathArgumentExtractor
                                .fromPathAndType(consumer.getStateEngine(), rootType, fieldPath, matchFieldType,
                                        FieldPaths::createFieldPathForHashIndex)));
    }

    /**
     * Finds matches for a given query.
     *
     * @param query the query
     * @return a stream of matching records (may be empty if there are no matches)
     */
    @Override
    public Stream<S> apply(Q query) {
        return findMatches(query);
    }

    /**
     * Finds matches for a given query.
     *
     * @param query the query
     * @return a stream of matching records (may be empty if there are no matches)
     */
    public Stream<S> findMatches(Q query) {
        Object[] queryArray = matchFields.stream().map(mf -> mf.extract(query)).toArray();
        HollowHashIndexResult matches = hhi.findMatches(queryArray);
        if (matches == null) {
            return Stream.empty();
        }
        // Map each matched ordinal to an instance of the select type.
        return matches.stream().mapToObj(i -> selectField.extract(api, i));
    }

    // HollowConsumer.RefreshListener

    @Override public void refreshStarted(long currentVersion, long requestedVersion) {
    }

    @Override public void snapshotUpdateOccurred(HollowAPI api, HollowReadStateEngine stateEngine, long version) {
        // A snapshot yields a new state engine; the index cannot be updated
        // incrementally, so detach the old one and rebuild against the new state.
        HollowHashIndex hhi = this.hhi;
        hhi.detachFromDeltaUpdates();
        hhi = new HollowHashIndex(consumer.getStateEngine(), rootTypeName, selectFieldPath, matchFieldPaths);
        hhi.listenForDeltaUpdates();
        this.hhi = hhi;
        this.api = api;
    }

    @Override public void deltaUpdateOccurred(HollowAPI api, HollowReadStateEngine stateEngine, long version) {
        // The index tracks deltas itself; only the API handle changes.
        this.api = api;
    }

    @Override public void blobLoaded(HollowConsumer.Blob transition) {
    }

    @Override public void refreshSuccessful(long beforeVersion, long afterVersion, long requestedVersion) {
    }

    @Override public void refreshFailed(
            long beforeVersion, long afterVersion, long requestedVersion, Throwable failureCause) {
    }

    // HollowConsumer.RefreshRegistrationListener

    @Override public void onBeforeAddition(HollowConsumer c) {
        if (c != consumer) {
            throw new IllegalStateException("The index's consumer and the listener's consumer are not the same");
        }
        hhi.listenForDeltaUpdates();
    }

    @Override public void onAfterRemoval(HollowConsumer c) {
        hhi.detachFromDeltaUpdates();
    }

    /**
     * The builder of a {@link HashIndexSelect}.
     *
     * @param <T> the root type
     * @param <S> the select, and result, type
     */
    public static class BuilderWithSelect<T extends HollowRecord, S extends HollowRecord> {
        final HollowConsumer consumer;
        final Class<T> rootType;
        final String selectFieldPath;
        final Class<S> selectFieldType;

        BuilderWithSelect(
                HollowConsumer consumer, Class<T> rootType,
                String selectFieldPath, Class<S> selectFieldType) {
            this.consumer = consumer;
            this.rootType = rootType;
            this.selectFieldPath = selectFieldPath;
            this.selectFieldType = selectFieldType;
        }

        /**
         * Creates a {@link HashIndexSelect} for matching with field paths and types declared by
         * {@link FieldPath} annotated fields or methods on the given query type.
         *
         * @param queryType the query type
         * @param <Q> the query type
         * @return a {@code HashIndexSelect}
         * @throws IllegalArgumentException if the query type declares one or more invalid field paths
         * or invalid types given resolution of corresponding field paths
         * @throws IllegalArgumentException if the select field path is invalid, or the select field type
         * is invalid given resolution of the select field path.
         */
        public <Q> HashIndexSelect<T, S, Q> usingBean(Class<Q> queryType) {
            Objects.requireNonNull(queryType);
            return new HashIndexSelect<>(consumer, rootType, selectFieldType, selectFieldPath,
                    queryType);
        }

        /**
         * Creates a {@link HashIndexSelect} for matching with a single query field path and type.
         *
         * @param queryFieldPath the query field path
         * @param queryFieldType the query type
         * @param <Q> the query type
         * @return a {@code HashIndexSelect}
         * @throws IllegalArgumentException if the query field path is empty or invalid
         * @throws IllegalArgumentException if the query field type is invalid given resolution of the
         * query field path
         * @throws IllegalArgumentException if the select field path is invalid, or the select field type
         * is invalid given resolution of the select field path.
         */
        public <Q> HashIndexSelect<T, S, Q> usingPath(String queryFieldPath, Class<Q> queryFieldType) {
            Objects.requireNonNull(queryFieldPath);
            if (queryFieldPath.isEmpty()) {
                // Fix: the message previously blamed "selectFieldPath" although the empty
                // argument is the query field path (matches HashIndex.Builder.usingPath).
                throw new IllegalArgumentException("queryFieldPath argument is an empty String");
            }
            Objects.requireNonNull(queryFieldType);
            return new HashIndexSelect<>(consumer, rootType, selectFieldType, selectFieldPath,
                    queryFieldPath, queryFieldType);
        }
    }
}
| 9,348 |
// File: com/netflix/hollow/api/consumer/index/SelectFieldPathResultExtractor.java
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.consumer.index;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.api.objects.HollowObject;
import com.netflix.hollow.api.objects.HollowRecord;
import com.netflix.hollow.api.objects.generic.GenericHollowObject;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.index.FieldPaths;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.write.objectmapper.HollowObjectTypeMapper;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;
/**
* An extractor that extracts a result value for an associated select field path transforming the value if
* necessary from an ordinal integer to an instance of a {@code HollowRecord} depending on the result type.
*
* @param <T> the result type
*/
final class SelectFieldPathResultExtractor<T> {
    // The resolved (and validated) select field path.
    final FieldPaths.FieldPath<FieldPaths.FieldSegment> fieldPath;
    // Maps (api, ordinal) -> an instance of the result type T.
    final BiObjectIntFunction<HollowAPI, T> extractor;

    SelectFieldPathResultExtractor(
            FieldPaths.FieldPath<FieldPaths.FieldSegment> fieldPath,
            BiObjectIntFunction<HollowAPI, T> extractor) {
        this.fieldPath = fieldPath;
        this.extractor = extractor;
    }

    /** A function of (T, int) -> R; avoids boxing the int (ordinal) argument. */
    interface BiObjectIntFunction<T, R> {
        R apply(T t, int i);
    }

    /** Extracts the result instance for the record at the given ordinal. */
    T extract(HollowAPI api, int ordinal) {
        return extractor.apply(api, ordinal);
    }

    // Error factory: select field path resolved to a non-reference (value) field.
    static IllegalArgumentException incompatibleSelectType(
            Class<?> selectType, String fieldPath, HollowObjectSchema.FieldType schemaFieldType) {
        return new IllegalArgumentException(
                String.format("Select type %s incompatible with field path %s resolving to field of type %s",
                        selectType.getName(), fieldPath, schemaFieldType));
    }

    // Error factory: select field path resolved to a reference type the select type can't represent.
    static IllegalArgumentException incompatibleSelectType(Class<?> selectType, String fieldPath, String typeName) {
        return new IllegalArgumentException(
                String.format(
                        "Select type %s incompatible with field path %s resolving to field of reference type %s",
                        selectType.getName(), fieldPath, typeName));
    }

    /**
     * Creates an extractor for {@code selectType}, validating that {@code fieldPath}
     * (resolved from {@code rootType} in {@code dataset}) refers to a reference type
     * compatible with it.
     *
     * <p>An empty field path selects the root type itself. For a
     * {@link GenericHollowObject} select type, results are wrapped generically;
     * otherwise a generated-API accessor named {@code get<SelectType>(int)} is
     * looked up reflectively on {@code apiType}.
     *
     * @throws IllegalArgumentException if the path resolves to a value field, to a
     *         reference type incompatible with the select type, or if no matching
     *         accessor exists on the API type
     */
    static <T> SelectFieldPathResultExtractor<T> from(
            Class<? extends HollowAPI> apiType, HollowDataset dataset, Class<?> rootType, String fieldPath,
            Class<T> selectType) {
        String rootTypeName = HollowObjectTypeMapper.getDefaultTypeName(rootType);
        FieldPaths.FieldPath<FieldPaths.FieldSegment> fp =
                FieldPaths.createFieldPathForHashIndex(dataset, rootTypeName, fieldPath);

        String typeName;
        if (!fp.getSegments().isEmpty()) {
            // @@@ Method on FieldPath
            FieldPaths.FieldSegment lastSegment = fp.getSegments().get(fp.getSegments().size() - 1);
            HollowSchema.SchemaType schemaType = lastSegment.getEnclosingSchema().getSchemaType();
            HollowObjectSchema.FieldType schemaFieldType;
            if (schemaType == HollowSchema.SchemaType.OBJECT) {
                FieldPaths.ObjectFieldSegment os = (FieldPaths.ObjectFieldSegment) lastSegment;
                schemaFieldType = os.getType();
            } else {
                // Non-object schemas (list/set/map) are treated as reference-typed.
                schemaFieldType = HollowObjectSchema.FieldType.REFERENCE;
            }
            typeName = lastSegment.getTypeName();

            if (schemaFieldType != HollowObjectSchema.FieldType.REFERENCE) {
                // The field path must reference a field of a reference type
                // This is contrary to the underlying HollowHashIndex which selects
                // the enclosing reference type for a field of a value type.
                // It is considered better to be consistent and literal with field path
                // expressions
                throw incompatibleSelectType(selectType, fieldPath, schemaFieldType);
            } else if (typeName.equals("String")) {
                // String-typed references only accept HollowObject-based select types.
                if (!HollowObject.class.isAssignableFrom(selectType)) {
                    throw incompatibleSelectType(selectType, fieldPath, typeName);
                }
                // @@@ Check that object schema has single value field of String type such as HString
            } else if (!HollowObjectTypeMapper.getDefaultTypeName(selectType).equals(typeName)) {
                // Name mismatch is only tolerated for GenericHollowObject on object schemas.
                if (schemaType != HollowSchema.SchemaType.OBJECT && !GenericHollowObject.class.isAssignableFrom(
                        selectType)) {
                    throw incompatibleSelectType(selectType, fieldPath, typeName);
                }
                // @@@ GenericHollow{List, Set, Map} based on schemaType
            } else if (!HollowRecord.class.isAssignableFrom(selectType)) {
                throw incompatibleSelectType(selectType, fieldPath, typeName);
            }
        } else {
            // Empty field path: select the root type itself.
            typeName = rootTypeName;
        }

        if (GenericHollowObject.class.isAssignableFrom(selectType)) {
            // Generic access: wrap the ordinal without requiring a generated API accessor.
            BiObjectIntFunction<HollowAPI, T> extractor =
                    (a, o) -> {
                        @SuppressWarnings("unchecked")
                        T t = (T) new GenericHollowObject(a.getDataAccess(), typeName, o);
                        return t;
                    };
            return new SelectFieldPathResultExtractor<>(fp, extractor);
        } else {
            // Typed access: resolve the generated accessor "get<SelectType>(int)" once, up front.
            MethodHandle selectInstantiate;
            try {
                selectInstantiate = MethodHandles.lookup().findVirtual(
                        apiType,
                        "get" + selectType.getSimpleName(),
                        MethodType.methodType(selectType, int.class));
            } catch (NoSuchMethodException | IllegalAccessException e) {
                throw new IllegalArgumentException(
                        String.format("Select type %s is not associated with API %s",
                                selectType.getName(), apiType.getName()),
                        e);
            }

            BiObjectIntFunction<HollowAPI, T> extractor = (a, i) -> {
                try {
                    @SuppressWarnings("unchecked")
                    T s = (T) selectInstantiate.invoke(a, i);
                    return s;
                } catch (RuntimeException | Error e) {
                    // Propagate unchecked throwables as-is.
                    throw e;
                } catch (Throwable e) {
                    // MethodHandle.invoke declares Throwable; wrap anything checked.
                    throw new RuntimeException(e);
                }
            };
            return new SelectFieldPathResultExtractor<>(fp, extractor);
        }
    }
}
| 9,349 |
// File: com/netflix/hollow/api/consumer/index/HollowUniqueKeyIndex.java
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.hollow.api.consumer.index;
/**
 * A minimal contract for unique-key (e.g. primary key) indexes: looks up the single
 * record matching a key.
 *
 * @param <T> the matched record type
 */
public interface HollowUniqueKeyIndex<T> {
    /**
     * Finds the unique record matching the given key field values.
     *
     * @param keys the key field values, in the order of the index's key field paths
     * @return the matching record (implementations presumably return {@code null}
     *         when there is no match — confirm per implementation)
     */
    T findMatch(Object... keys);
}
| 9,350 |
// File: com/netflix/hollow/api/consumer/index/UniqueKeyIndex.java
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.consumer.index;
import static java.util.stream.Collectors.toList;
import com.netflix.hollow.api.consumer.HollowConsumer;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.api.objects.HollowObject;
import com.netflix.hollow.core.HollowConstants;
import com.netflix.hollow.core.index.FieldPaths;
import com.netflix.hollow.core.index.HollowPrimaryKeyIndex;
import com.netflix.hollow.core.index.key.PrimaryKey;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.write.objectmapper.HollowObjectTypeMapper;
import com.netflix.hollow.core.write.objectmapper.HollowTypeMapper;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.function.Function;
import java.util.stream.Stream;
/**
* A type safe index for indexing with a unique key (such as a primary key).
* <p>
* If the index is {@link HollowConsumer#addRefreshListener(HollowConsumer.RefreshListener) registered} with its
* associated {@link HollowConsumer} then the index will track updates and changes will be reflected in matched results
* (performed after such updates). When a registered index is no longer needed it should be
* {@link HollowConsumer#removeRefreshListener(HollowConsumer.RefreshListener) deregistered} to avoid unnecessary
* index recalculation and to ensure the index is reclaimed by the garbage collector.
*
* @param <T> the unique type
* @param <Q> the key type
*/
public class UniqueKeyIndex<T extends HollowObject, Q>
        implements HollowConsumer.RefreshListener, HollowConsumer.RefreshRegistrationListener, Function<Q, T> {
    final HollowConsumer consumer;
    HollowAPI api; // replaced on snapshot/delta refreshes while registered as a listener
    final SelectFieldPathResultExtractor<T> uniqueTypeExtractor;
    final List<MatchFieldPathArgumentExtractor<Q>> matchFields;
    final String uniqueSchemaName;
    final String[] matchFieldPaths;
    HollowPrimaryKeyIndex hpki; // rebuilt from scratch on snapshot refreshes

    UniqueKeyIndex(
            HollowConsumer consumer,
            Class<T> uniqueType,
            PrimaryKey primaryTypeKey,
            List<MatchFieldPathArgumentExtractor<Q>> matchFields) {
        this.consumer = consumer;
        this.api = consumer.getAPI();
        this.uniqueSchemaName = HollowObjectTypeMapper.getDefaultTypeName(uniqueType);
        // An empty select field path selects the unique type itself.
        this.uniqueTypeExtractor = SelectFieldPathResultExtractor
                .from(consumer.getAPI().getClass(), consumer.getStateEngine(), uniqueType, "", uniqueType);

        if (primaryTypeKey != null) {
            matchFields = validatePrimaryKeyFieldPaths(consumer, uniqueSchemaName, primaryTypeKey, matchFields);
        }
        this.matchFields = matchFields;
        this.matchFieldPaths = matchFields.stream()
                .map(mf -> mf.fieldPath.toString())
                .toArray(String[]::new);

        this.hpki = new HollowPrimaryKeyIndex(consumer.getStateEngine(), uniqueSchemaName, matchFieldPaths);
    }

    /**
     * Validates that the match field paths are exactly the primary key's field paths,
     * and returns the match field extractors reordered to the primary key's order.
     *
     * @throws IllegalArgumentException if the match field paths do not cover all of
     *         the primary key's field paths
     */
    static <Q> List<MatchFieldPathArgumentExtractor<Q>> validatePrimaryKeyFieldPaths(
            HollowConsumer consumer, String primaryTypeName,
            PrimaryKey primaryTypeKey, List<MatchFieldPathArgumentExtractor<Q>> matchFields) {
        // Validate primary key field paths
        List<FieldPaths.FieldPath<FieldPaths.ObjectFieldSegment>> paths = Stream.of(
                primaryTypeKey.getFieldPaths())
                .map(fp -> FieldPaths
                        .createFieldPathForPrimaryKey(consumer.getStateEngine(), primaryTypeName, fp))
                .collect(toList());

        // Validate that primary key field paths are the same as that on the match fields.
        // If so then match field extractors are shuffled to have the same order as
        // primary key field paths.
        List<MatchFieldPathArgumentExtractor<Q>> orderedMatchFields = paths.stream().flatMap(
                path -> {
                    MatchFieldPathArgumentExtractor<Q> mfe =
                            matchFields.stream().filter(e -> e.fieldPath.equals(path)).findFirst().orElse(null);
                    // Fix: return Stream.empty() instead of null. Stream.flatMap tolerates a
                    // null mapped stream (treated as empty), but the explicit form is clearer
                    // and doesn't lean on that corner of the contract.
                    return mfe != null ? Stream.of(mfe) : Stream.<MatchFieldPathArgumentExtractor<Q>>empty();
                }).collect(toList());
        if (orderedMatchFields.size() != paths.size()) {
            // Fix: the @@@-marked bare IllegalArgumentException now carries a diagnostic message.
            throw new IllegalArgumentException(String.format(
                    "Key field paths do not match the primary key field paths [%s] of type %s",
                    String.join(", ", primaryTypeKey.getFieldPaths()), primaryTypeName));
        }

        return orderedMatchFields;
    }

    // Match fields declared by @FieldPath annotated members of the holder class.
    UniqueKeyIndex(
            HollowConsumer consumer,
            Class<T> uniqueType,
            PrimaryKey primaryTypeKey,
            Class<Q> matchFieldsType) {
        // @@@ Use FieldPaths.createFieldPathForPrimaryKey
        this(consumer,
                uniqueType,
                primaryTypeKey,
                MatchFieldPathArgumentExtractor
                        .fromHolderClass(consumer.getStateEngine(), uniqueType, matchFieldsType,
                                FieldPaths::createFieldPathForPrimaryKey));
    }

    // Single match field declared by an explicit field path and type.
    UniqueKeyIndex(
            HollowConsumer consumer,
            Class<T> uniqueType,
            PrimaryKey primaryTypeKey,
            String fieldPath, Class<Q> matchFieldType) {
        // @@@ Use FieldPaths.createFieldPathForPrimaryKey
        this(consumer,
                uniqueType,
                primaryTypeKey,
                Collections.singletonList(
                        MatchFieldPathArgumentExtractor
                                .fromPathAndType(consumer.getStateEngine(), uniqueType, fieldPath, matchFieldType,
                                        FieldPaths::createFieldPathForPrimaryKey)));
    }

    /**
     * Finds the unique object, an instance of the unique type, for a given key.
     *
     * @param key the key
     * @return the unique object
     */
    @Override
    public T apply(Q key) {
        return findMatch(key);
    }

    /**
     * Finds the unique object, an instance of the unique type, for a given key.
     *
     * @param key the key
     * @return the unique object, or {@code null} if there is no match
     */
    public T findMatch(Q key) {
        // Extract the key's field values. Null extractions are skipped, so the logical
        // size may be smaller than the number of match fields.
        Object[] keyArray = new Object[matchFields.size()];
        int keyArrayLogicalSize = 0;
        for (int i = 0; i < matchFields.size(); i++) {
            Object matched = matchFields.get(i).extract(key);
            if (matched != null) {
                keyArray[keyArrayLogicalSize++] = matched;
            }
        }

        int ordinal = -1; // dead initializer; every branch below assigns or returns
        if (keyArrayLogicalSize <= 0)
            return null;
        else if (keyArrayLogicalSize == 1)
            ordinal = hpki.getMatchingOrdinal(keyArray[0]);
        else if (keyArrayLogicalSize == 2)
            ordinal = hpki.getMatchingOrdinal(keyArray[0], keyArray[1]);
        else if (keyArrayLogicalSize == 3)
            ordinal = hpki.getMatchingOrdinal(keyArray[0], keyArray[1], keyArray[2]);
        else
            ordinal = hpki.getMatchingOrdinal(keyArray);

        if (ordinal == HollowConstants.ORDINAL_NONE) {
            return null;
        }
        return uniqueTypeExtractor.extract(api, ordinal);
    }

    // HollowConsumer.RefreshListener

    @Override public void refreshStarted(long currentVersion, long requestedVersion) {
    }

    @Override public void snapshotUpdateOccurred(HollowAPI api, HollowReadStateEngine stateEngine, long version) {
        // A snapshot yields a new state engine; the index cannot be updated
        // incrementally, so detach the old one and rebuild against the new state.
        HollowPrimaryKeyIndex hpki = this.hpki;
        hpki.detachFromDeltaUpdates();
        hpki = new HollowPrimaryKeyIndex(consumer.getStateEngine(), hpki.getPrimaryKey());
        hpki.listenForDeltaUpdates();
        this.hpki = hpki;
        this.api = api;
    }

    @Override public void deltaUpdateOccurred(HollowAPI api, HollowReadStateEngine stateEngine, long version) {
        // The index tracks deltas itself; only the API handle changes.
        this.api = api;
    }

    @Override public void blobLoaded(HollowConsumer.Blob transition) {
    }

    @Override public void refreshSuccessful(long beforeVersion, long afterVersion, long requestedVersion) {
    }

    @Override public void refreshFailed(
            long beforeVersion, long afterVersion, long requestedVersion, Throwable failureCause) {
    }

    // HollowConsumer.RefreshRegistrationListener

    @Override public void onBeforeAddition(HollowConsumer c) {
        if (c != consumer) {
            throw new IllegalStateException("The index's consumer and the listener's consumer are not the same");
        }
        hpki.listenForDeltaUpdates();
    }

    @Override public void onAfterRemoval(HollowConsumer c) {
        hpki.detachFromDeltaUpdates();
    }

    /**
     * Starts the building of a {@link UniqueKeyIndex}.
     *
     * @param consumer the consumer containing instances of the given unique type
     * @param uniqueType the unique type
     * @param <T> the unique type
     * @return a builder
     */
    public static <T extends HollowObject> Builder<T> from(HollowConsumer consumer, Class<T> uniqueType) {
        Objects.requireNonNull(consumer);
        Objects.requireNonNull(uniqueType);
        return new Builder<>(consumer, uniqueType);
    }

    /**
     * The builder of a {@link UniqueKeyIndex}.
     *
     * @param <T> the unique type
     */
    public static final class Builder<T extends HollowObject> {
        final HollowConsumer consumer;
        final Class<T> uniqueType;
        PrimaryKey primaryTypeKey; // non-null on bindWithPrimaryKeyOnType

        Builder(HollowConsumer consumer, Class<T> uniqueType) {
            this.consumer = consumer;
            this.uniqueType = uniqueType;
        }

        /**
         * Binds the field paths with those of the primary key associated with the schema of the unique type.
         *
         * @return this builder
         * @throws com.netflix.hollow.api.error.SchemaNotFoundException if there is no schema for the unique
         * type
         * @throws IllegalArgumentException if there is no primary key associated with the unique type
         */
        public Builder<T> bindToPrimaryKey() {
            String primaryTypeName = HollowTypeMapper.getDefaultTypeName(uniqueType);
            HollowSchema schema = consumer.getStateEngine().getNonNullSchema(primaryTypeName);
            assert schema.getSchemaType() == HollowSchema.SchemaType.OBJECT;
            this.primaryTypeKey = ((HollowObjectSchema) schema).getPrimaryKey();
            if (primaryTypeKey == null) {
                throw new IllegalArgumentException(
                        String.format("No primary key associated with primary type %s", uniqueType));
            }
            return this;
        }

        /**
         * Creates a {@link UniqueKeyIndex} for matching with field paths and types declared by
         * {@link FieldPath} annotated fields or methods on the given key type.
         *
         * @param keyType the key type
         * @param <Q> the key type
         * @return a {@code UniqueKeyIndex}
         * @throws IllegalArgumentException if the key type declares one or more invalid field paths
         * or invalid types given resolution of corresponding field paths
         * @throws IllegalArgumentException if the builder is bound to the primary key of the unique type and
         * the field paths declared by the key type are not the identical to those declared by the primary key
         */
        public <Q> UniqueKeyIndex<T, Q> usingBean(Class<Q> keyType) {
            Objects.requireNonNull(keyType);
            return new UniqueKeyIndex<>(consumer, uniqueType, primaryTypeKey,
                    keyType);
        }

        /**
         * Creates a {@link UniqueKeyIndex} for matching with a single key field path and type.
         *
         * @param keyFieldPath the key field path
         * @param keyFieldType the key type
         * @param <Q> the key type
         * @return a {@code UniqueKeyIndex}
         * @throws IllegalArgumentException if the key field path is empty or invalid
         * @throws IllegalArgumentException if the key field type is invalid given resolution of the
         * key field path
         * @throws IllegalArgumentException if the builder is bound to the primary key of the unique type and
         * the field path declared by the key type is not identical to the keyFieldPath
         */
        public <Q> UniqueKeyIndex<T, Q> usingPath(String keyFieldPath, Class<Q> keyFieldType) {
            Objects.requireNonNull(keyFieldPath);
            if (keyFieldPath.isEmpty()) {
                throw new IllegalArgumentException("keyFieldPath argument is an empty String");
            }
            Objects.requireNonNull(keyFieldType);
            return new UniqueKeyIndex<>(consumer, uniqueType, primaryTypeKey,
                    keyFieldPath, keyFieldType);
        }
    }
}
| 9,351 |
// File: com/netflix/hollow/api/consumer/index/FieldPath.java
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.consumer.index;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* A field path associated with a field or method declaration whose type or
* return type respectively is associated resolution of the field path.
*
* @see com.netflix.hollow.core.index.FieldPaths
*/
@Retention(RetentionPolicy.RUNTIME)
@Target( {ElementType.FIELD, ElementType.METHOD}) public @interface FieldPath {
    /**
     * @return the field path; if empty, the path is derived from the annotated field or
     *         method name
     */
    String value() default "";

    /**
     * @return the field path order (relative ordering among annotated members of the
     *         same holder class — presumably used when resolving composite keys;
     *         confirm against MatchFieldPathArgumentExtractor)
     */
    int order() default 0;
}
| 9,352 |
// File: com/netflix/hollow/api/consumer/index/HashIndex.java
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.consumer.index;
import com.netflix.hollow.api.consumer.HollowConsumer;
import com.netflix.hollow.api.objects.HollowRecord;
import java.util.Objects;
/**
* A type safe hash index for indexing non-primary-key data.
* <p>
* This type of index can map multiple keys to a single matching record,
* and/or multiple records to a single key.
* <p>
* If the index is {@link HollowConsumer#addRefreshListener(HollowConsumer.RefreshListener) registered} with its
* associated {@link HollowConsumer} then the index will track updates and changes will be reflected in matched results
* (performed after such updates). When a registered index is no longer needed it should be
* {@link HollowConsumer#removeRefreshListener(HollowConsumer.RefreshListener) deregistered} to avoid unnecessary
* index recalculation and to ensure the index is reclaimed by the garbage collector.
*
* @param <T> the root, select, and result type
* @param <Q> the query type
*/
public final class HashIndex<T extends HollowRecord, Q> extends HashIndexSelect<T, T, Q> {

    // Selecting the root type itself is expressed as an empty select field path.
    HashIndex(
            HollowConsumer consumer,
            Class<T> rootType,
            Class<Q> matchedFieldsType) {
        super(consumer, rootType, rootType, "", matchedFieldsType);
    }

    HashIndex(
            HollowConsumer consumer,
            Class<T> rootType,
            String fieldPath, Class<Q> matchedFieldType) {
        super(consumer, rootType, rootType, "", fieldPath, matchedFieldType);
    }

    /**
     * Begins building a {@link HashIndex} over records of {@code rootType} held by
     * the given consumer.
     *
     * @param consumer the consumer containing instances of the given root type
     * @param rootType the root type to match and select from
     * @param <T> the root type
     * @return a builder
     */
    public static <T extends HollowRecord> Builder<T> from(HollowConsumer consumer, Class<T> rootType) {
        Objects.requireNonNull(consumer);
        Objects.requireNonNull(rootType);
        return new Builder<>(consumer, rootType);
    }

    /**
     * Builds a {@link HashIndex}, or transitions to building a {@link HashIndexSelect}
     * via {@link #selectField(String, Class)}.
     *
     * @param <T> the root type
     */
    public static final class Builder<T extends HollowRecord> {
        final HollowConsumer consumer;
        final Class<T> rootType;

        Builder(HollowConsumer consumer, Class<T> rootType) {
            this.consumer = consumer;
            this.rootType = rootType;
        }

        /**
         * Builds an index whose query is described by the {@link FieldPath} annotated
         * fields or methods of {@code queryType}.
         *
         * @param queryType the query type
         * @param <Q> the query type
         * @return a {@code HashIndex}
         * @throws IllegalArgumentException if the query type declares one or more invalid
         * field paths, or invalid types given resolution of those field paths
         */
        public <Q> HashIndex<T, Q> usingBean(Class<Q> queryType) {
            Objects.requireNonNull(queryType);
            return new HashIndex<>(consumer, rootType, queryType);
        }

        /**
         * Builds an index whose query is a single field path with an associated type.
         *
         * @param queryFieldPath the query field path
         * @param queryFieldType the query type
         * @param <Q> the query type
         * @return a {@code HashIndex}
         * @throws IllegalArgumentException if the query field path is empty or invalid, or
         * the query field type is invalid given resolution of the query field path
         */
        public <Q> HashIndex<T, Q> usingPath(String queryFieldPath, Class<Q> queryFieldType) {
            Objects.requireNonNull(queryFieldPath);
            if (queryFieldPath.isEmpty()) {
                throw new IllegalArgumentException("queryFieldPath argument is an empty String");
            }
            Objects.requireNonNull(queryFieldType);
            return new HashIndex<>(consumer, rootType, queryFieldPath, queryFieldType);
        }

        /**
         * Transitions to building a {@link HashIndexSelect}, whose results are
         * sub-records of the root type.
         *
         * @param selectFieldPath the select field path
         * @param selectFieldType the select (and result) field type associated with the
         * resolved select field path
         * @param <S> the select type
         * @return a builder of a {@link HashIndexSelect}
         */
        public <S extends HollowRecord> BuilderWithSelect<T, S> selectField(
                String selectFieldPath, Class<S> selectFieldType) {
            Objects.requireNonNull(selectFieldPath);
            Objects.requireNonNull(selectFieldType);
            return new BuilderWithSelect<>(consumer, rootType, selectFieldPath, selectFieldType);
        }
    }
}
| 9,353 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/consumer | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/consumer/index/MatchFieldPathArgumentExtractor.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.consumer.index;
import static java.util.stream.Collectors.toList;
import com.netflix.hollow.api.objects.HollowObject;
import com.netflix.hollow.api.objects.HollowRecord;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.index.FieldPaths;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.write.objectmapper.HollowObjectTypeMapper;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.reflect.AnnotatedElement;
import java.lang.reflect.Field;
import java.lang.reflect.Member;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Comparator;
import java.util.List;
import java.util.function.Function;
import java.util.stream.Stream;
/**
* An extractor that extracts an argument value from an instance of a holding type for an associated match field
* path, transforming the value if necessary from a {@code HollowRecord} to an ordinal integer value.
*
* @param <Q> query type
*/
final class MatchFieldPathArgumentExtractor<Q> {
    /**
     * A resolver of a field path.
     */
    interface FieldPathResolver {
        /**
         * Resolves a field path, rooted at the given type, to a {@link FieldPaths.FieldPath}.
         */
        FieldPaths.FieldPath<? extends FieldPaths.FieldSegment> resolve(
                HollowDataset hollowDataAccess, String type, String fieldPath);
    }
    // The resolved field path whose value this extractor supplies on a match.
    final FieldPaths.FieldPath<? extends FieldPaths.FieldSegment> fieldPath;
    // Extracts the match argument from a query instance.  By construction (see fromFunction)
    // HollowRecord-typed values have already been adapted to their ordinal ints and
    // primitive widenings (byte/short/char -> int, char[] -> String) already applied.
    final Function<Q, Object> extractor;
    MatchFieldPathArgumentExtractor(
            FieldPaths.FieldPath<? extends FieldPaths.FieldSegment> fieldPath, Function<Q, ?> extractor) {
        this.fieldPath = fieldPath;
        // Safe erasure: the function is only applied to Q instances and its result is
        // consumed as Object.
        @SuppressWarnings("unchecked")
        Function<Q, Object> erasedResultExtractor = (Function<Q, Object>) extractor;
        this.extractor = erasedResultExtractor;
    }
    // Applies the extractor to a query instance, yielding the match argument value.
    Object extract(Q v) {
        return extractor.apply(v);
    }
    /**
     * Builds one extractor per {@code @FieldPath}-annotated field, or zero-argument
     * non-void method, declared directly on the holder (query) class, ordered by
     * {@link FieldPath#order()}.  IllegalAccessException from unreflection is wrapped
     * in a RuntimeException.
     */
    static <Q> List<MatchFieldPathArgumentExtractor<Q>> fromHolderClass(
            HollowDataset dataset, Class<?> rootType, Class<Q> holder,
            FieldPathResolver fpResolver) {
        // @@@ Check for duplicates
        // @@@ Cache result for Q, needs to be associated with dataset
        // and resolving kind (currently implicit to implementation of fpResolver)
        // @@@ Support holder type of Object[] accepting an instance of String[] for field paths
        // on construction and Object[] on match enabling "reflective" operation if
        // static beans are not desired
        // Query annotated fields
        Stream<Field> fields = Stream.of(holder.getDeclaredFields())
                .filter(f -> f.isAnnotationPresent(FieldPath.class));
        // Query annotated methods (abstract or concrete) that have
        // a return type and no parameter types; synthetic and native members are skipped
        Stream<Method> methods = Stream.of(holder.getDeclaredMethods())
                .filter(m -> m.isAnnotationPresent(FieldPath.class))
                .filter(m -> m.getReturnType() != void.class)
                .filter(m -> m.getParameterCount() == 0)
                .filter(m -> !m.isSynthetic())
                .filter(m -> !Modifier.isNative(m.getModifiers()));
        return Stream.concat(fields, methods)
                .sorted(Comparator.comparingInt(f -> f.getDeclaredAnnotation(FieldPath.class).order()))
                .map(ae -> {
                    try {
                        if (ae instanceof Field) {
                            return MatchFieldPathArgumentExtractor.<Q>fromField(dataset, rootType, (Field) ae,
                                    fpResolver);
                        } else {
                            return MatchFieldPathArgumentExtractor.<Q>fromMethod(dataset, rootType, (Method) ae,
                                    fpResolver);
                        }
                    } catch (IllegalAccessException e) {
                        throw new RuntimeException(e);
                    }
                })
                .collect(toList());
    }
    /**
     * Builds an extractor that reads the given field via an unreflected getter handle.
     * The field's {@code @FieldPath} value (or its name, as fallback) is the field path.
     */
    static <Q> MatchFieldPathArgumentExtractor<Q> fromField(
            HollowDataset dataset, Class<?> rootType, Field f,
            FieldPathResolver fpResolver)
            throws IllegalAccessException {
        f.setAccessible(true);
        return fromHandle(dataset, rootType, getFieldPath(f), MethodHandles.lookup().unreflectGetter(f),
                fpResolver);
    }
    /**
     * Builds an extractor that invokes the given zero-argument method via an unreflected
     * handle.  Re-validates the method shape since this can be called directly, not only
     * through the pre-filtered fromHolderClass stream.
     */
    static <Q> MatchFieldPathArgumentExtractor<Q> fromMethod(
            HollowDataset dataset, Class<?> rootType, Method m,
            FieldPathResolver fpResolver)
            throws IllegalAccessException {
        if (m.getReturnType() == void.class || m.getParameterCount() > 0) {
            throw new IllegalArgumentException(String.format(
                    "A @FieldPath annotated method must have zero parameters and a non-void return type: %s",
                    m.toGenericString()));
        }
        m.setAccessible(true);
        return fromHandle(dataset, rootType, getFieldPath(m), MethodHandles.lookup().unreflect(m),
                fpResolver);
    }
    // Adapts a getter/method handle to a Function, using the handle's return type as the
    // declared extractor type for compatibility checking.
    static <Q> MatchFieldPathArgumentExtractor<Q> fromHandle(
            HollowDataset dataset, Class<?> rootType, String fieldPath, MethodHandle mh,
            FieldPathResolver fpResolver) {
        return fromFunction(dataset, rootType, fieldPath, mh.type().returnType(), getterGenericExtractor(mh),
                fpResolver);
    }
    // Builds an extractor for a bare (path, type) pair: the query value itself is the
    // match argument, so the identity function is used.
    static <T> MatchFieldPathArgumentExtractor<T> fromPathAndType(
            HollowDataset dataset, Class<?> rootType, String fieldPath, Class<T> type,
            FieldPathResolver fpResolver) {
        return fromFunction(dataset, rootType, fieldPath, type, Function.identity(),
                fpResolver);
    }
    // Error for a path resolving to a value (non-reference) schema field of an
    // incompatible type.
    static IllegalArgumentException incompatibleMatchType(
            Class<?> extractorType, String fieldPath,
            HollowObjectSchema.FieldType schemaFieldType) {
        return new IllegalArgumentException(
                String.format("Match type %s incompatible with field path %s resolving to field of value type %s",
                        extractorType.getName(), fieldPath, schemaFieldType));
    }
    // Error for a path resolving to a reference field whose type does not match the
    // extractor's type.
    static IllegalArgumentException incompatibleMatchType(
            Class<?> extractorType, String fieldPath, String typeName) {
        return new IllegalArgumentException(
                String.format(
                        "Match type %s incompatible with field path %s resolving to field of reference type %s",
                        extractorType.getName(), fieldPath, typeName));
    }
    /**
     * Core factory: resolves the field path against the dataset, validates that the
     * extractor's declared type is compatible with the schema type of the path's last
     * segment, and adapts the extractor function where a conversion is required
     * (numeric widening to int, char[] to String, HollowRecord to ordinal int).
     *
     * @throws IllegalArgumentException if the types are incompatible
     */
    static <Q, T> MatchFieldPathArgumentExtractor<Q> fromFunction(
            HollowDataset dataset, Class<?> rootType, String fieldPath,
            Class<T> extractorType, Function<Q, T> extractorFunction,
            FieldPathResolver fpResolver) {
        String rootTypeName = HollowObjectTypeMapper.getDefaultTypeName(rootType);
        FieldPaths.FieldPath<? extends FieldPaths.FieldSegment> fp = fpResolver.resolve(dataset, rootTypeName,
                fieldPath);
        // @@@ Method on FieldPath
        FieldPaths.FieldSegment lastSegment = fp.getSegments().get(fp.getSegments().size() - 1);
        HollowObjectSchema.FieldType schemaFieldType;
        if (lastSegment.getEnclosingSchema().getSchemaType() == HollowSchema.SchemaType.OBJECT) {
            FieldPaths.ObjectFieldSegment os = (FieldPaths.ObjectFieldSegment) lastSegment;
            schemaFieldType = os.getType();
        } else {
            // Non-object schemas (list/set/map) are matched by reference
            schemaFieldType = HollowObjectSchema.FieldType.REFERENCE;
        }
        Function<Q, ?> extractor = extractorFunction;
        switch (schemaFieldType) {
            case BOOLEAN:
                if (extractorType != boolean.class && extractorType != Boolean.class) {
                    throw incompatibleMatchType(extractorType, fieldPath, schemaFieldType);
                }
                break;
            case DOUBLE:
                if (extractorType != double.class && extractorType != Double.class) {
                    throw incompatibleMatchType(extractorType, fieldPath, schemaFieldType);
                }
                break;
            case FLOAT:
                if (extractorType != float.class && extractorType != Float.class) {
                    throw incompatibleMatchType(extractorType, fieldPath, schemaFieldType);
                }
                break;
            case INT:
                // byte, short, and char values are widened to int to match the schema type
                if (extractorType == byte.class || extractorType == Byte.class) {
                    @SuppressWarnings("unchecked")
                    Function<Q, Byte> f = (Function<Q, Byte>) extractorFunction;
                    extractor = f.andThen(Byte::intValue);
                    break;
                } else if (extractorType == short.class || extractorType == Short.class) {
                    @SuppressWarnings("unchecked")
                    Function<Q, Short> f = (Function<Q, Short>) extractorFunction;
                    extractor = f.andThen(Short::intValue);
                    break;
                } else if (extractorType == char.class || extractorType == Character.class) {
                    @SuppressWarnings("unchecked")
                    Function<Q, Character> f = (Function<Q, Character>) extractorFunction;
                    extractor = f.andThen(c -> (int) c);
                } else if (extractorType != int.class && extractorType != Integer.class) {
                    throw incompatibleMatchType(extractorType, fieldPath, schemaFieldType);
                }
                break;
            case LONG:
                if (extractorType != long.class && extractorType != Long.class) {
                    throw incompatibleMatchType(extractorType, fieldPath, schemaFieldType);
                }
                break;
            case REFERENCE: {
                // @@@ If extractorType == int.class then consider it an ordinal value
                // and directly use the extractorFunction
                String typeName = lastSegment.getTypeName();
                // Manage for String and all box types
                if (typeName.equals("String")) {
                    // A "String" reference is matched with any HollowObject (e.g. HString)
                    if (!HollowObject.class.isAssignableFrom(extractorType)) {
                        throw incompatibleMatchType(extractorType, fieldPath, typeName);
                    }
                    // @@@ Check that object schema has single value field of String type such as HString
                } else if (!extractorType.getSimpleName().equals(typeName)) {
                    throw incompatibleMatchType(extractorType, fieldPath, typeName);
                } else if (!HollowRecord.class.isAssignableFrom(extractorType)) {
                    throw incompatibleMatchType(extractorType, fieldPath, typeName);
                }
                // Reference matches are performed on the record's ordinal
                @SuppressWarnings("unchecked")
                Function<Q, HollowRecord> f = (Function<Q, HollowRecord>) extractorFunction;
                extractor = f.andThen(HollowRecord::getOrdinal);
                break;
            }
            case BYTES:
                if (extractorType != byte[].class) {
                    throw incompatibleMatchType(extractorType, fieldPath, schemaFieldType);
                }
                break;
            case STRING:
                // char[] values are converted to String to match the schema type
                if (extractorType == char[].class) {
                    @SuppressWarnings("unchecked")
                    Function<Q, char[]> f = (Function<Q, char[]>) extractorFunction;
                    extractor = f.andThen(String::valueOf);
                    break;
                } else if (extractorType != String.class) {
                    throw incompatibleMatchType(extractorType, fieldPath, schemaFieldType);
                }
                break;
        }
        return new MatchFieldPathArgumentExtractor<>(fp, extractor);
    }
    // Wraps a method handle invocation as a Function, rethrowing unchecked throwables
    // as-is and wrapping checked ones in RuntimeException.
    private static <Q, T> Function<Q, T> getterGenericExtractor(MethodHandle getter) {
        return h -> {
            try {
                @SuppressWarnings("unchecked")
                T t = (T) getter.invoke(h);
                return t;
            } catch (RuntimeException | Error e) {
                throw e;
            } catch (Throwable e) {
                throw new RuntimeException(e);
            }
        };
    }
    private static String getFieldPath(Field f) {
        return getFieldPath(f, f);
    }
    private static String getFieldPath(Method m) {
        return getFieldPath(m, m);
    }
    // Returns the @FieldPath value if present and non-empty, otherwise the member's name.
    private static String getFieldPath(Member m, AnnotatedElement e) {
        FieldPath fpa = e.getDeclaredAnnotation(FieldPath.class);
        if (fpa == null) {
            return m.getName();
        }
        String fieldPath = e.getDeclaredAnnotation(FieldPath.class).value();
        if (fieldPath.isEmpty()) {
            return m.getName();
        }
        return fieldPath;
    }
}
| 9,354 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/consumer | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/consumer/fs/HollowFilesystemBlobRetriever.java | /*
* Copyright 2016-2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.consumer.fs;
import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;
import com.netflix.hollow.api.consumer.HollowConsumer;
import com.netflix.hollow.api.consumer.HollowConsumer.Blob.BlobType;
import com.netflix.hollow.core.HollowConstants;
import com.netflix.hollow.core.read.OptionalBlobPartInput;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.logging.Logger;
public class HollowFilesystemBlobRetriever implements HollowConsumer.BlobRetriever {
private static final Logger LOG = Logger.getLogger(HollowFilesystemBlobRetriever.class.getName());
private final Path blobStorePath;
private final HollowConsumer.BlobRetriever fallbackBlobRetriever;
private final boolean useExistingStaleSnapshot;
private final Set<String> optionalBlobParts;
/**
* A new HollowFilesystemBlobRetriever which is not backed by a remote store.
*
* @param blobStorePath The directory from which to retrieve blobs
* @since 2.12.0
*/
@SuppressWarnings("unused")
public HollowFilesystemBlobRetriever(Path blobStorePath) {
this(blobStorePath, null, false);
}
/**
* A new HollowFileSystemBlobRetriever which is backed by a remote store. When a blob from the remote store
* is requested which exists locally, then the local copy is used. When a blob from the remote store is
* requested which does not exist locally, it is copied to the filesystem right before it is loaded.
*
* @param blobStorePath The directory from which to retrieve blobs, if available
* @param fallbackBlobRetriever The remote blob retriever from which to retrieve blobs if they are not already
* available on the filesystem.
* @since 2.12.0
*/
public HollowFilesystemBlobRetriever(Path blobStorePath, HollowConsumer.BlobRetriever fallbackBlobRetriever) {
this(blobStorePath, fallbackBlobRetriever, false);
}
/**
* A new HollowFileSystemBlobRetriever which is backed by a remote store. When a blob from the remote store
* is requested which exists locally, then the local copy is used. When a blob from the remote store is
* requested which does not exist locally, it is copied to the filesystem right before it is loaded.
*
* @param blobStorePath The directory from which to retrieve blobs, if available
* @param fallbackBlobRetriever The remote blob retriever from which to retrieve blobs if they are not already
* available on the filesystem.
* @param useExistingStaleSnapshot If true and a snapshot blob is requested then if there exists a local snapshot
* blob present for a version older than the desired version then that snapshot blob is
* returned and the fallback blob retriever (if present) is not queried.
*/
public HollowFilesystemBlobRetriever(Path blobStorePath, HollowConsumer.BlobRetriever fallbackBlobRetriever, boolean useExistingStaleSnapshot) {
this.blobStorePath = blobStorePath;
this.fallbackBlobRetriever = fallbackBlobRetriever;
this.useExistingStaleSnapshot = useExistingStaleSnapshot;
this.optionalBlobParts = fallbackBlobRetriever == null ? null : fallbackBlobRetriever.configuredOptionalBlobParts();
ensurePathExists(blobStorePath);
}
/**
* A new HollowFilesystemBlobRetriever which is not backed by a remote store.
*
* Uses the configured optional blob parts
*
* @param blobStorePath The directory from which to retrieve blobs
* @since 2.12.0
*/
public HollowFilesystemBlobRetriever(Path blobStorePath, Set<String> optionalBlobParts) {
this.blobStorePath = blobStorePath;
this.optionalBlobParts = optionalBlobParts;
this.useExistingStaleSnapshot = true;
this.fallbackBlobRetriever = null;
ensurePathExists(blobStorePath);
}
private void ensurePathExists(Path blobStorePath) {
try {
if(!Files.exists(this.blobStorePath)){
Files.createDirectories(this.blobStorePath);
}
} catch (IOException e) {
throw new RuntimeException("Could not create folder for blobRetriever; path=" + blobStorePath, e);
}
}
@Override
public HollowConsumer.HeaderBlob retrieveHeaderBlob(long desiredVersion) {
Path exactPath = blobStorePath.resolve("header-" + desiredVersion);
if (Files.exists(exactPath))
return new FilesystemHeaderBlob(exactPath, desiredVersion);
long maxVersionBeforeDesired = HollowConstants.VERSION_NONE;
try(DirectoryStream<Path> directoryStream = Files.newDirectoryStream(blobStorePath)) {
for (Path path : directoryStream) {
String filename = path.getFileName().toString();
if(filename.startsWith("header-")) {
long version = Long.parseLong(filename.substring(filename.lastIndexOf("-") + 1));
if(version < desiredVersion && version > maxVersionBeforeDesired) {
maxVersionBeforeDesired = version;
}
}
}
} catch(IOException ex) {
throw new RuntimeException("Error listing header files; path=" + blobStorePath, ex);
}
HollowConsumer.HeaderBlob filesystemBlob = null;
if (maxVersionBeforeDesired != HollowConstants.VERSION_NONE) {
filesystemBlob = new FilesystemHeaderBlob(blobStorePath.resolve("snapshot-" + maxVersionBeforeDesired), maxVersionBeforeDesired);
if (useExistingStaleSnapshot) {
return filesystemBlob;
}
}
if(fallbackBlobRetriever != null) {
HollowConsumer.HeaderBlob remoteBlob = fallbackBlobRetriever.retrieveHeaderBlob(desiredVersion);
if(remoteBlob != null && (filesystemBlob == null || remoteBlob.getVersion() != filesystemBlob.getVersion()))
return new HeaderBlobFromBackupToFilesystem(remoteBlob, blobStorePath.resolve("header-" + remoteBlob.getVersion()));
}
return filesystemBlob;
}
@Override
public HollowConsumer.Blob retrieveSnapshotBlob(long desiredVersion) {
Path exactPath = blobStorePath.resolve("snapshot-" + desiredVersion);
if(Files.exists(exactPath) && allRequestedPartsExist(BlobType.SNAPSHOT, -1L, desiredVersion))
return filesystemBlob(BlobType.SNAPSHOT, -1L, desiredVersion);
long maxVersionBeforeDesired = HollowConstants.VERSION_NONE;
try(DirectoryStream<Path> directoryStream = Files.newDirectoryStream(blobStorePath)) {
for (Path path : directoryStream) {
String filename = path.getFileName().toString();
if(filename.startsWith("snapshot-")) {
long version;
try {
version = Long.parseLong(filename.substring(filename.lastIndexOf("-") + 1));
} catch (NumberFormatException ex) { // for e.g. file snapshot-20230212155028322.133f8fdd
LOG.info("Ignoring ineligible file in local blob store: " + path);
continue;
}
if(version < desiredVersion && version > maxVersionBeforeDesired && allRequestedPartsExist(BlobType.SNAPSHOT, -1L, version)) {
maxVersionBeforeDesired = version;
}
}
}
} catch(IOException ex) {
throw new RuntimeException("Error listing snapshot files; path=" + blobStorePath, ex);
}
HollowConsumer.Blob filesystemBlob = null;
if (maxVersionBeforeDesired != HollowConstants.VERSION_NONE) {
filesystemBlob = filesystemBlob(BlobType.SNAPSHOT, -1L, maxVersionBeforeDesired);
if (useExistingStaleSnapshot) {
return filesystemBlob;
}
}
if(fallbackBlobRetriever != null) {
HollowConsumer.Blob remoteBlob = fallbackBlobRetriever.retrieveSnapshotBlob(desiredVersion);
if(remoteBlob != null && (filesystemBlob == null || remoteBlob.getToVersion() != filesystemBlob.getToVersion()))
return new BlobForBackupToFilesystem(remoteBlob, blobStorePath.resolve("snapshot-" + remoteBlob.getToVersion()));
}
return filesystemBlob;
}
private HollowConsumer.Blob filesystemBlob(HollowConsumer.Blob.BlobType type, long currentVersion, long destinationVersion) {
Path path;
Map<String, Path> optionalPartPaths = null;
switch(type) {
case SNAPSHOT:
path = blobStorePath.resolve("snapshot-" + destinationVersion);
if(optionalBlobParts != null && !optionalBlobParts.isEmpty()) {
optionalPartPaths = new HashMap<>(optionalBlobParts.size());
for(String part : optionalBlobParts) {
optionalPartPaths.put(part, blobStorePath.resolve("snapshot_"+part+"-"+destinationVersion));
}
}
return new FilesystemBlob(path, destinationVersion, optionalPartPaths);
case DELTA:
path = blobStorePath.resolve("delta-" + currentVersion + "-" + destinationVersion);
if(optionalBlobParts != null && !optionalBlobParts.isEmpty()) {
optionalPartPaths = new HashMap<>(optionalBlobParts.size());
for(String part : optionalBlobParts) {
optionalPartPaths.put(part, blobStorePath.resolve("delta_"+part+"-"+currentVersion+"-"+destinationVersion));
}
}
return new FilesystemBlob(path, currentVersion, destinationVersion, optionalPartPaths);
case REVERSE_DELTA:
path = blobStorePath.resolve("reversedelta-" + currentVersion + "-" + destinationVersion);
if(optionalBlobParts != null && !optionalBlobParts.isEmpty()) {
optionalPartPaths = new HashMap<>(optionalBlobParts.size());
for(String part : optionalBlobParts) {
optionalPartPaths.put(part, blobStorePath.resolve("reversedelta_"+part+"-"+currentVersion+"-"+destinationVersion));
}
}
return new FilesystemBlob(path, currentVersion, destinationVersion, optionalPartPaths);
default:
throw new IllegalArgumentException("Unknown BlobType: " + type.toString());
}
}
@Override
public HollowConsumer.Blob retrieveDeltaBlob(long currentVersion) {
try(DirectoryStream<Path> directoryStream = Files.newDirectoryStream(blobStorePath)) {
for (Path path : directoryStream) {
String filename = path.getFileName().toString();
if(filename.startsWith("delta-" + currentVersion)) {
long destinationVersion = Long.parseLong(filename.substring(filename.lastIndexOf("-") + 1));
if(allRequestedPartsExist(BlobType.DELTA, currentVersion, destinationVersion))
return filesystemBlob(BlobType.DELTA, currentVersion, destinationVersion);
}
}
} catch(IOException ex) {
throw new RuntimeException("Error listing delta files; path=" + blobStorePath, ex);
}
if(fallbackBlobRetriever != null) {
HollowConsumer.Blob remoteBlob = fallbackBlobRetriever.retrieveDeltaBlob(currentVersion);
if(remoteBlob != null)
return new BlobForBackupToFilesystem(remoteBlob, blobStorePath.resolve("delta-" + remoteBlob.getFromVersion() + "-" + remoteBlob.getToVersion()));
}
return null;
}
@Override
public HollowConsumer.Blob retrieveReverseDeltaBlob(long currentVersion) {
try(DirectoryStream<Path> directoryStream = Files.newDirectoryStream(blobStorePath)) {
for (Path path : directoryStream) {
String filename = path.getFileName().toString();
if(filename.startsWith("reversedelta-" + currentVersion)) {
long destinationVersion = Long.parseLong(filename.substring(filename.lastIndexOf("-") + 1));
if(allRequestedPartsExist(BlobType.REVERSE_DELTA, currentVersion, destinationVersion))
return filesystemBlob(BlobType.REVERSE_DELTA, currentVersion, destinationVersion);
}
}
} catch(IOException ex) {
throw new RuntimeException("Error listing reverse delta files; path=" + blobStorePath, ex);
}
if(fallbackBlobRetriever != null) {
HollowConsumer.Blob remoteBlob = fallbackBlobRetriever.retrieveReverseDeltaBlob(currentVersion);
if(remoteBlob != null)
return new BlobForBackupToFilesystem(remoteBlob, blobStorePath.resolve("reversedelta-" + remoteBlob.getFromVersion() + "-" + remoteBlob.getToVersion()));
}
return null;
}
private boolean allRequestedPartsExist(HollowConsumer.Blob.BlobType type, long currentVersion, long destinationVersion) {
if(optionalBlobParts == null || optionalBlobParts.isEmpty())
return true;
for(String part : optionalBlobParts) {
String filename = null;
switch(type) {
case SNAPSHOT:
filename = "snapshot_" + part + "-" + destinationVersion;
break;
case DELTA:
filename = "delta_" + part + "-" + currentVersion + "-" + destinationVersion;
break;
case REVERSE_DELTA:
filename = "reversedelta_" + part + "-" + currentVersion + "-" + destinationVersion;
break;
}
if(!Files.exists(blobStorePath.resolve(filename)))
return false;
}
return true;
}
private static class FilesystemHeaderBlob extends HollowConsumer.HeaderBlob {
private final Path path;
protected FilesystemHeaderBlob(Path headerPath, long version) {
super(version);
this.path = headerPath;
}
@Override
public InputStream getInputStream() throws IOException {
return new BufferedInputStream(Files.newInputStream(path));
}
@Override
public File getFile() throws IOException {
return path.toFile();
}
}
private static class FilesystemBlob extends HollowConsumer.Blob {
private final Path path;
private final Map<String, Path> optionalPartPaths;
@Deprecated
FilesystemBlob(File snapshotFile, long toVersion) {
this(snapshotFile.toPath(), toVersion);
}
/**
* @since 2.12.0
*/
FilesystemBlob(Path snapshotPath, long toVersion) {
this(snapshotPath, toVersion, null);
}
/**
* @since 2.12.0
*/
FilesystemBlob(Path deltaPath, long fromVersion, long toVersion) {
this(deltaPath, fromVersion, toVersion, null);
}
FilesystemBlob(Path snapshotPath, long toVersion, Map<String, Path> optionalPartPaths) {
super(toVersion);
this.path = snapshotPath;
this.optionalPartPaths = optionalPartPaths;
}
FilesystemBlob(Path deltaPath, long fromVersion, long toVersion, Map<String, Path> optionalPartPaths) {
super(fromVersion, toVersion);
this.path = deltaPath;
this.optionalPartPaths = optionalPartPaths;
}
@Override
public InputStream getInputStream() throws IOException {
return new BufferedInputStream(Files.newInputStream(path));
}
@Override
public OptionalBlobPartInput getOptionalBlobPartInputs() throws IOException {
if(optionalPartPaths == null || optionalPartPaths.isEmpty())
return null;
OptionalBlobPartInput input = new OptionalBlobPartInput();
for(Map.Entry<String, Path> pathEntry : optionalPartPaths.entrySet()) {
input.addInput(pathEntry.getKey(), pathEntry.getValue().toFile());
}
return input;
}
@Override
public File getFile() throws IOException {
return path.toFile();
}
}
private static class HeaderBlobFromBackupToFilesystem extends HollowConsumer.HeaderBlob {
private final HollowConsumer.HeaderBlob remoteHeaderBlob;
private final Path path;
protected HeaderBlobFromBackupToFilesystem(HollowConsumer.HeaderBlob remoteHeaderBlob, Path destinationPath) {
super(remoteHeaderBlob.getVersion());
this.path = destinationPath;
this.remoteHeaderBlob = remoteHeaderBlob;
}
@Override
public InputStream getInputStream() throws IOException {
Path tempPath = path.resolveSibling(path.getName(path.getNameCount()-1) + "-" + UUID.randomUUID().toString());
try(
InputStream is = remoteHeaderBlob.getInputStream();
OutputStream os = Files.newOutputStream(tempPath)
) {
byte buf[] = new byte[4096];
int n;
while (-1 != (n = is.read(buf)))
os.write(buf, 0, n);
}
Files.move(tempPath, path, REPLACE_EXISTING);
return new BufferedInputStream(Files.newInputStream(path));
}
@Override
public File getFile() throws IOException {
Path tempPath = path.resolveSibling(path.getName(path.getNameCount()-1) + "-" + UUID.randomUUID().toString());
try(
InputStream is = remoteHeaderBlob.getInputStream();
OutputStream os = Files.newOutputStream(tempPath)
) {
byte buf[] = new byte[4096];
int n;
while (-1 != (n = is.read(buf)))
os.write(buf, 0, n);
}
Files.move(tempPath, path, REPLACE_EXISTING);
return path.toFile();
}
}
private static class BlobForBackupToFilesystem extends HollowConsumer.Blob {
private final HollowConsumer.Blob remoteBlob;
private final Path path;
BlobForBackupToFilesystem(HollowConsumer.Blob remoteBlob, Path destinationPath) {
super(remoteBlob.getFromVersion(), remoteBlob.getToVersion());
this.path = destinationPath;
this.remoteBlob = remoteBlob;
}
@Override
public InputStream getInputStream() throws IOException {
Path tempPath = path.resolveSibling(path.getName(path.getNameCount()-1) + "-" + UUID.randomUUID().toString());
try(
InputStream is = remoteBlob.getInputStream();
OutputStream os = Files.newOutputStream(tempPath)
) {
byte buf[] = new byte[4096];
int n;
while (-1 != (n = is.read(buf)))
os.write(buf, 0, n);
}
Files.move(tempPath, path, REPLACE_EXISTING);
return new BufferedInputStream(Files.newInputStream(path));
}
@Override
public File getFile() throws IOException {
Path tempPath = path.resolveSibling(path.getName(path.getNameCount()-1) + "-" + UUID.randomUUID().toString());
try(
InputStream is = remoteBlob.getInputStream();
OutputStream os = Files.newOutputStream(tempPath)
) {
byte buf[] = new byte[4096];
int n;
while (-1 != (n = is.read(buf)))
os.write(buf, 0, n);
}
Files.move(tempPath, path, REPLACE_EXISTING);
return path.toFile();
}
@Override
public OptionalBlobPartInput getOptionalBlobPartInputs() throws IOException {
OptionalBlobPartInput remoteOptionalParts = remoteBlob.getOptionalBlobPartInputs();
if(remoteOptionalParts == null)
return null;
OptionalBlobPartInput localOptionalParts = new OptionalBlobPartInput();
for(Map.Entry<String, InputStream> entry : remoteOptionalParts.getInputStreamsByPartName().entrySet()) {
Path tempPath = path.resolveSibling(path.getName(path.getNameCount()-1) + "_" + entry.getKey() + "-" + UUID.randomUUID().toString());
Path destPath = getBlobType() == BlobType.SNAPSHOT ?
path.resolveSibling(getBlobType().getType() + "_" + entry.getKey() + "-" + getToVersion())
: path.resolveSibling(getBlobType().getType() + "_" + entry.getKey() + "-" + getFromVersion() + "-" + getToVersion());
try(
InputStream is = entry.getValue();
OutputStream os = Files.newOutputStream(tempPath)
) {
byte buf[] = new byte[4096];
int n;
while (-1 != (n = is.read(buf, 0, buf.length)))
os.write(buf, 0, n);
}
Files.move(tempPath, destPath, REPLACE_EXISTING);
localOptionalParts.addInput(entry.getKey(), new BufferedInputStream(Files.newInputStream(destPath)));
}
return localOptionalParts;
}
}
} | 9,355 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/consumer | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/consumer/fs/HollowFilesystemAnnouncementWatcher.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.consumer.fs;
import static com.netflix.hollow.core.util.Threads.daemonThread;
import static java.nio.file.Files.getLastModifiedTime;
import static java.util.concurrent.Executors.newScheduledThreadPool;
import com.netflix.hollow.api.consumer.HollowConsumer;
import com.netflix.hollow.api.producer.fs.HollowFilesystemAnnouncer;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.FileTime;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * An {@link HollowConsumer.AnnouncementWatcher} that polls an announcement file on the
 * local file system once per second and triggers an asynchronous refresh on every
 * subscribed consumer whenever the announced version changes.
 */
public class HollowFilesystemAnnouncementWatcher implements HollowConsumer.AnnouncementWatcher {
    private static final Logger log = Logger.getLogger(HollowFilesystemAnnouncementWatcher.class.getName());

    private final Path announcePath;
    private final List<HollowConsumer> subscribedConsumers;
    private final ScheduledExecutorService executor;
    private final ScheduledFuture<?> watchFuture;
    private boolean ownedExecutor;

    // Written by the watch thread (Watch.run) and read from arbitrary caller threads
    // via getLatestVersion(); volatile is required so updates are guaranteed visible
    // across threads.
    private volatile long latestVersion;

    /**
     * Creates a file system announcement watcher backed by its own single-threaded
     * daemon scheduler (shut down when this watcher is garbage collected).
     *
     * @param publishPath the publish path
     * @since 2.12.0
     */
    @SuppressWarnings("unused")
    public HollowFilesystemAnnouncementWatcher(Path publishPath) {
        this(publishPath, newScheduledThreadPool(1,
                r -> daemonThread(r, HollowFilesystemAnnouncementWatcher.class, "watch; path=" + publishPath)));
        ownedExecutor = true;
    }

    /**
     * Creates a file system announcement watcher.
     *
     * @param publishPath the publish path
     * @param executor the executor from which watching is executed; not shut down by
     *                 this watcher since it is owned by the caller
     * @since 2.12.0
     */
    @SuppressWarnings("WeakerAccess")
    public HollowFilesystemAnnouncementWatcher(Path publishPath, ScheduledExecutorService executor) {
        this.executor = executor;
        this.announcePath = publishPath.resolve(HollowFilesystemAnnouncer.ANNOUNCEMENT_FILENAME);
        this.subscribedConsumers = new CopyOnWriteArrayList<>();
        this.latestVersion = readLatestVersion();
        // Start polling only after latestVersion is initialized.
        this.watchFuture = setupWatch();
    }

    // NOTE(review): finalize() is deprecated for removal in recent JDKs; retained here
    // to preserve the existing cleanup contract. The Watch task holds only a weak
    // reference to this watcher, so finalization is not blocked by the scheduled task.
    @Override
    protected void finalize() throws Throwable {
        super.finalize();
        watchFuture.cancel(true);
        if (ownedExecutor) {
            executor.shutdownNow();
        }
    }

    /** Schedules the polling task with a fixed 1 second delay between runs. */
    private ScheduledFuture setupWatch() {
        return executor.scheduleWithFixedDelay(new Watch(this), 0, 1, TimeUnit.SECONDS);
    }

    @Override
    public long getLatestVersion() {
        return latestVersion;
    }

    @Override
    public void subscribeToUpdates(final HollowConsumer consumer) {
        subscribedConsumers.add(consumer);
    }

    /**
     * Reads the currently announced version from the announcement file.
     *
     * @return the announced version, or {@link #NO_ANNOUNCEMENT_AVAILABLE} if the
     *         announcement file is not readable
     * @throws RuntimeException wrapping any {@link IOException} while reading
     * @throws NumberFormatException if the file's first line is not a parsable long
     */
    private long readLatestVersion() {
        if (!Files.isReadable(announcePath))
            return NO_ANNOUNCEMENT_AVAILABLE;
        // Use Files.newBufferedReader (UTF-8) rather than FileReader, which decodes
        // with the platform-default charset; the content is a decimal version string.
        try (BufferedReader reader = Files.newBufferedReader(announcePath)) {
            return Long.parseLong(reader.readLine());
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Polling task. Holds only a {@link WeakReference} to the watcher so a scheduled
     * task does not keep an otherwise-unreachable watcher (and its executor) alive.
     */
    static class Watch implements Runnable {
        private FileTime previousFileTime = FileTime.from(0, TimeUnit.MILLISECONDS);
        private final WeakReference<HollowFilesystemAnnouncementWatcher> ref;

        Watch(HollowFilesystemAnnouncementWatcher watcher) {
            ref = new WeakReference<>(watcher);
        }

        @Override
        public void run() {
            try {
                HollowFilesystemAnnouncementWatcher watcher = ref.get();
                if (watcher != null) {
                    if (!Files.isReadable(watcher.announcePath)) return;
                    // Only re-read the file when its modification time has advanced.
                    FileTime lastModifiedTime = getLastModifiedTime(watcher.announcePath);
                    if (lastModifiedTime.compareTo(previousFileTime) > 0) {
                        previousFileTime = lastModifiedTime;
                        long currentVersion = watcher.readLatestVersion();
                        if (watcher.latestVersion != currentVersion) {
                            watcher.latestVersion = currentVersion;
                            for (HollowConsumer consumer : watcher.subscribedConsumers)
                                consumer.triggerAsyncRefresh();
                        }
                    }
                }
            } catch (Exception ex) {
                // Swallow recoverable failures so the periodic schedule keeps running.
                log.log(Level.WARNING, "Exception reading the current announced version", ex);
            } catch (Throwable th) {
                // Errors are fatal to this task; log and propagate (cancels the schedule).
                log.log(Level.SEVERE, "Exception reading the current announced version", th);
                throw th;
            }
        }
    }
}
| 9,356 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/consumer | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/consumer/data/AbstractHollowDataAccessor.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.hollow.api.consumer.data;
import static java.util.Objects.requireNonNull;
import com.netflix.hollow.api.consumer.HollowConsumer;
import com.netflix.hollow.core.index.HollowPrimaryKeyIndex;
import com.netflix.hollow.core.index.key.HollowPrimaryKeyValueDeriver;
import com.netflix.hollow.core.index.key.PrimaryKey;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.engine.HollowTypeReadState;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowSchema;
import com.netflix.hollow.core.schema.HollowSchema.SchemaType;
import com.netflix.hollow.core.util.AllHollowRecordCollection;
import com.netflix.hollow.core.util.HollowRecordCollection;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
/**
 * Provides per-type access to all, added, removed, and updated records of a single
 * OBJECT-schema type, comparing the current and previous ordinals of a
 * {@link HollowReadStateEngine}. A {@link PrimaryKey} is required (either supplied
 * explicitly or taken from the schema) so that a record removed and re-added with the
 * same key can be reported as an update rather than a remove plus an add.
 *
 * @param <T> the record representation returned by {@link #getRecord(int)}
 */
public abstract class AbstractHollowDataAccessor<T> {
    protected final String type;
    protected final PrimaryKey primaryKey;
    protected final HollowReadStateEngine rStateEngine;
    protected final HollowTypeReadState typeState;

    // Populated by computeDataChange(); safely published to unsynchronized readers
    // via the volatile write to isDataChangeComputed below.
    private BitSet removedOrdinals = new BitSet();
    private BitSet addedOrdinals = new BitSet();
    private List<UpdatedRecord<T>> updatedRecords = Collections.emptyList();

    // volatile: the getters below check this flag outside the synchronized block
    // (double-checked); the volatile read/write pair establishes the happens-before
    // edge that makes the fields written by computeDataChange() visible.
    private volatile boolean isDataChangeComputed = false;

    public AbstractHollowDataAccessor(HollowConsumer consumer, String type) {
        this(consumer.getStateEngine(), type);
    }

    public AbstractHollowDataAccessor(HollowReadStateEngine rStateEngine, String type) {
        this(rStateEngine, type, (PrimaryKey) null);
    }

    public AbstractHollowDataAccessor(HollowReadStateEngine rStateEngine, String type, String... fieldPaths) {
        this(rStateEngine, type, new PrimaryKey(type, fieldPaths));
    }

    /**
     * @param rStateEngine the read state engine; must not be null
     * @param type the type name; must be present in the dataset
     * @param primaryKey the primary key, or null to use the one declared on the schema
     * @throws RuntimeException if the type is not an OBJECT schema or no primary key
     *         can be determined
     */
    public AbstractHollowDataAccessor(HollowReadStateEngine rStateEngine, String type, PrimaryKey primaryKey) {
        this.rStateEngine = requireNonNull(rStateEngine, "read state required");
        this.typeState = requireNonNull(rStateEngine.getTypeState(type),
                "type not loaded or does not exist in dataset; type=" + type);
        HollowSchema schema = typeState.getSchema();
        if (schema instanceof HollowObjectSchema) {
            this.type = type;
            if (primaryKey == null) {
                HollowObjectSchema oSchema = ((HollowObjectSchema) schema);
                this.primaryKey = oSchema.getPrimaryKey();
            } else {
                this.primaryKey = primaryKey;
            }
            if (this.primaryKey == null)
                throw new RuntimeException(String.format("Unsupported DataType=%s with SchemaType=%s : %s", type, schema.getSchemaType(), "PrimaryKey is missing"));
        } else {
            throw new RuntimeException(String.format("Unsupported DataType=%s with SchemaType=%s : %s", type, schema.getSchemaType(), "Only supported type=" + SchemaType.OBJECT));
        }
    }

    /**
     * Indicate whether Data Accessor contains prior state
     *
     * NOTE: This is critical since loading a Snapshot will not contain any information about changes from prior state
     *
     * @return true indicate it contains prior state
     */
    public boolean hasPriorState() {
        return !typeState.getPreviousOrdinals().isEmpty();
    }

    /**
     * Compute Data Change (idempotent; subsequent calls are no-ops).
     */
    public synchronized void computeDataChange() {
        if (isDataChangeComputed) return;
        computeDataChange(type, rStateEngine, primaryKey);
        isDataChangeComputed = true; // volatile write publishes the results above
    }

    /**
     * @return true if data change has been computed
     */
    public boolean isDataChangeComputed() {
        return isDataChangeComputed;
    }

    /**
     * Diffs previous vs. current populated ordinals, then reclassifies key-matched
     * remove/add pairs as updates.
     */
    protected void computeDataChange(String type, HollowReadStateEngine stateEngine, PrimaryKey primaryKey) {
        HollowTypeReadState typeState = stateEngine.getTypeDataAccess(type).getTypeState();
        BitSet previousOrdinals = typeState.getPreviousOrdinals();
        BitSet currentOrdinals = typeState.getPopulatedOrdinals();
        // track removed ordinals (in previous but not current)
        removedOrdinals = new BitSet();
        removedOrdinals.or(previousOrdinals);
        removedOrdinals.andNot(currentOrdinals);
        // track added ordinals (in current but not previous)
        addedOrdinals = new BitSet();
        addedOrdinals.or(currentOrdinals);
        addedOrdinals.andNot(previousOrdinals);
        // track updated ordinals
        updatedRecords = new ArrayList<>();
        HollowPrimaryKeyValueDeriver keyDeriver = new HollowPrimaryKeyValueDeriver(primaryKey, stateEngine);
        // Index only the removed ordinals so we can match them against added keys.
        HollowPrimaryKeyIndex removalsIndex = new HollowPrimaryKeyIndex(stateEngine, primaryKey, stateEngine.getMemoryRecycler(), removedOrdinals);
        { // Determine updated records (removed records and added back with different value)
            int addedOrdinal = addedOrdinals.nextSetBit(0);
            while (addedOrdinal != -1) {
                Object[] key = keyDeriver.getRecordKey(addedOrdinal);
                int removedOrdinal = removalsIndex.getMatchingOrdinal(key);
                if (removedOrdinal != -1) { // record was re-added after being removed = update
                    updatedRecords.add(new UpdatedRecordOrdinal(removedOrdinal, addedOrdinal));
                    // removedOrdinal && addedOrdinal is from an UPDATE so clear it from explicit tracking
                    addedOrdinals.clear(addedOrdinal);
                    removedOrdinals.clear(removedOrdinal);
                }
                addedOrdinal = addedOrdinals.nextSetBit(addedOrdinal + 1);
            }
        }
    }

    /**
     * @return the associated Type
     */
    public String getType() {
        return type;
    }

    /**
     * @return the PrimaryKey that can uniquely identify a single record
     */
    public PrimaryKey getPrimaryKey() {
        return primaryKey;
    }

    /**
     * @param ordinal the ordinal
     * @return the Record at specified Ordinal
     */
    public abstract T getRecord(int ordinal);

    /**
     * @return all the available Record
     */
    public Collection<T> getAllRecords() {
        return new AllHollowRecordCollection<T>(rStateEngine.getTypeState(type)) {
            @Override
            protected T getForOrdinal(int ordinal) {
                return getRecord(ordinal);
            }
        };
    }

    /**
     * @return only the Records that are Added
     * @see #getUpdatedRecords()
     */
    public Collection<T> getAddedRecords() {
        if (!isDataChangeComputed) computeDataChange();
        return new HollowRecordCollection<T>(addedOrdinals) {
            @Override
            protected T getForOrdinal(int ordinal) {
                return getRecord(ordinal);
            }
        };
    }

    /**
     * @return only the Records that are Removed
     * @see #getUpdatedRecords()
     */
    public Collection<T> getRemovedRecords() {
        if (!isDataChangeComputed) computeDataChange();
        return new HollowRecordCollection<T>(removedOrdinals) {
            @Override
            protected T getForOrdinal(int ordinal) {
                return getRecord(ordinal);
            }
        };
    }

    /**
     * @return the Records that are Updated with both Before and After
     * @see UpdatedRecord
     */
    public Collection<UpdatedRecord<T>> getUpdatedRecords() {
        if (!isDataChangeComputed) computeDataChange();
        return updatedRecords;
    }

    /**
     * An {@link UpdatedRecord} that materializes its before/after values lazily from
     * ordinals, avoiding record instantiation until they are actually requested.
     */
    private class UpdatedRecordOrdinal extends UpdatedRecord<T> {
        private final int before;
        private final int after;

        private UpdatedRecordOrdinal(int before, int after) {
            super(null, null);
            this.before = before;
            this.after = after;
        }

        @Override
        public T getBefore() {
            return getRecord(before);
        }

        @Override
        public T getAfter() {
            return getRecord(after);
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            if (!super.equals(o)) return false;
            UpdatedRecordOrdinal that = (UpdatedRecordOrdinal) o;
            return before == that.before &&
                    after == that.after;
        }

        @Override
        public int hashCode() {
            return Objects.hash(super.hashCode(), before, after);
        }
    }

    /**
     * Keeps track of record before and after Update
     */
    public static class UpdatedRecord<T> {
        private final T before;
        private final T after;

        public UpdatedRecord(T before, T after) {
            this.before = before;
            this.after = after;
        }

        public T getBefore() {
            return before;
        }

        public T getAfter() {
            return after;
        }

        @Override
        public int hashCode() {
            final int prime = 31;
            int result = 1;
            result = prime * result + ((after == null) ? 0 : after.hashCode());
            result = prime * result + ((before == null) ? 0 : before.hashCode());
            return result;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj)
                return true;
            if (obj == null)
                return false;
            if (getClass() != obj.getClass())
                return false;
            UpdatedRecord<?> other = (UpdatedRecord<?>) obj;
            if (after == null) {
                if (other.after != null)
                    return false;
            } else if (!after.equals(other.after))
                return false;
            if (before == null) {
                if (other.before != null)
                    return false;
            } else if (!before.equals(other.before))
                return false;
            return true;
        }

        @Override
        public String toString() {
            StringBuilder builder = new StringBuilder();
            builder.append("UpdatedRecord [before=");
            builder.append(getBefore());
            builder.append(", after=");
            builder.append(getAfter());
            builder.append("]");
            return builder.toString();
        }
    }
}
| 9,357 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/consumer | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/consumer/data/AbstractHollowOrdinalIterable.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.hollow.api.consumer.data;
import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator;
import java.util.Iterator;
// @@@ AbstractHollowOrdinalIterable is incorrect, it's a one shot iterable that
// behaves incorrectly on second and subsequent iterations
public abstract class AbstractHollowOrdinalIterable<T> implements Iterable<T> {
private final HollowOrdinalIterator iter;
private final int firstOrdinal;
public AbstractHollowOrdinalIterable(final HollowOrdinalIterator iter) {
this.iter = iter;
this.firstOrdinal = iter.next();
}
protected abstract T getData(int ordinal);
@Override
public Iterator<T> iterator() {
return new Iterator<T>() {
private int next = firstOrdinal;
@Override
public boolean hasNext() {
return next != HollowOrdinalIterator.NO_MORE_ORDINALS;
}
@Override
public T next() {
T obj = getData(next);
next = iter.next();
return obj;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
} | 9,358 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/consumer | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/consumer/data/GenericHollowRecordDataAccessor.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.hollow.api.consumer.data;
import com.netflix.hollow.api.consumer.HollowConsumer;
import com.netflix.hollow.api.objects.delegate.HollowObjectGenericDelegate;
import com.netflix.hollow.api.objects.generic.GenericHollowObject;
import com.netflix.hollow.core.index.key.PrimaryKey;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.engine.object.HollowObjectTypeReadState;
/*
* Provide generic way to access data per type to retrieve All, Added, Updated and Removed Records
*
* It manages data as GenericHollowObject
*/
public class GenericHollowRecordDataAccessor extends AbstractHollowDataAccessor<GenericHollowObject> {

    public GenericHollowRecordDataAccessor(HollowConsumer consumer, String type) {
        super(consumer.getStateEngine(), type);
    }

    public GenericHollowRecordDataAccessor(HollowReadStateEngine rStateEngine, String type) {
        super(rStateEngine, type, (PrimaryKey) null);
    }

    public GenericHollowRecordDataAccessor(HollowReadStateEngine rStateEngine, String type, String... fieldPaths) {
        super(rStateEngine, type, new PrimaryKey(type, fieldPaths));
    }

    public GenericHollowRecordDataAccessor(HollowReadStateEngine rStateEngine, String type, PrimaryKey primaryKey) {
        super(rStateEngine, type, primaryKey);
    }

    /** Wraps the record at {@code ordinal} in a schema-driven generic view. */
    @Override
    public GenericHollowObject getRecord(int ordinal) {
        HollowObjectTypeReadState readState =
                (HollowObjectTypeReadState) rStateEngine.getTypeDataAccess(type).getTypeState();
        return new GenericHollowObject(new HollowObjectGenericDelegate(readState), ordinal);
    }
}
| 9,359 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/perfapi/HollowPerfBackedMap.java | /*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.perfapi;
import com.netflix.hollow.core.read.dataaccess.HollowMapTypeDataAccess;
import com.netflix.hollow.core.read.iterator.HollowMapEntryOrdinalIterator;
import java.util.AbstractMap;
import java.util.AbstractSet;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.Set;
/**
 * A read-only {@link java.util.Map} view backed directly by hollow map data: entries
 * are resolved on demand from a {@link HollowMapTypeDataAccess} rather than being
 * materialized up front. All mutating operations inherited from
 * {@link AbstractMap} throw {@link UnsupportedOperationException}.
 *
 * @param <K> the key POJO type
 * @param <V> the value POJO type
 */
public class HollowPerfBackedMap<K, V> extends AbstractMap<K, V> {
    // Ordinal of the backing map record within its type.
    private final int ordinal;
    private final HollowMapTypeDataAccess dataAccess;
    // Type-index bits OR'd onto key/value ordinals to form typed hollow references.
    private final long keyMaskedTypeIdx;
    private final long valueMaskedTypeIdx;
    private final POJOInstantiator<K> keyInstantiator;
    private final POJOInstantiator<V> valueInstantiator;
    // Converts a key POJO into the Object[] hash key used by findValue().
    private final HashKeyExtractor hashKeyExtractor;
    public HollowPerfBackedMap(
            HollowMapTypePerfAPI typeApi, int ordinal,
            POJOInstantiator<K> keyInstantiator,
            POJOInstantiator<V> valueInstantiator,
            HashKeyExtractor hashKeyExtractor) {
        this.ordinal = ordinal;
        this.dataAccess = typeApi.typeAccess();
        this.keyMaskedTypeIdx = typeApi.keyMaskedTypeIdx;
        this.valueMaskedTypeIdx = typeApi.valueMaskedTypeIdx;
        this.keyInstantiator = keyInstantiator;
        this.valueInstantiator = valueInstantiator;
        this.hashKeyExtractor = hashKeyExtractor;
    }
    @Override
    public boolean containsKey(Object o) {
        // findValue() returns -1 when no entry matches the extracted hash key.
        Object[] hashKey = hashKeyExtractor.extractArray(o);
        return dataAccess.findValue(ordinal, hashKey) != -1;
    }
    @Override
    public V get(Object o) {
        Object[] hashKey = hashKeyExtractor.extractArray(o);
        int valueOrdinal = dataAccess.findValue(ordinal, hashKey);
        // Instantiate the value POJO from its typed reference only on a hit.
        return valueOrdinal == -1 ? null : valueInstantiator.instantiate(valueMaskedTypeIdx | valueOrdinal);
    }
    @Override
    public Set<Entry<K, V>> entrySet() {
        return new AbstractSet<Entry<K, V>>() {
            @Override
            public Iterator<Entry<K, V>> iterator() {
                // The ordinal iterator is one-shot; a fresh one is created per iterator().
                HollowMapEntryOrdinalIterator oi = dataAccess.ordinalIterator(ordinal);
                return new Iterator<Entry<K, V>>() {
                    boolean next = oi.next();
                    @Override
                    public boolean hasNext() {
                        return next;
                    }
                    @Override
                    public Entry<K, V> next() {
                        if (!hasNext()) {
                            throw new NoSuchElementException();
                        }
                        // Capture the current key/value refs before advancing.
                        long kRef = keyMaskedTypeIdx | oi.getKey();
                        long vRef = valueMaskedTypeIdx | oi.getValue();
                        Entry<K, V> e = new BackedEntry(kRef, vRef);
                        next = oi.next();
                        return e;
                    }
                };
            }
            @Override
            public int size() {
                return HollowPerfBackedMap.this.size();
            }
            @Override
            public boolean contains(Object o) {
                if (!(o instanceof Map.Entry)) {
                    return false;
                }
                // Look the key up directly instead of scanning the entry set.
                Entry<?, ?> e = (Entry<?, ?>) o;
                Object[] hashKey = hashKeyExtractor.extractArray(e.getKey());
                int valueOrdinal = dataAccess.findValue(ordinal, hashKey);
                if(valueOrdinal != -1) {
                    V iV = valueInstantiator.instantiate(valueMaskedTypeIdx | valueOrdinal);
                    if(Objects.equals(iV, e.getValue()))
                        return true;
                }
                return false;
            }
        };
    }
    @Override
    public int size() {
        return dataAccess.size(ordinal);
    }
    /**
     * Map entry whose key and value POJOs are instantiated lazily from their hollow
     * references on first access. Not thread-safe; intended for single-threaded
     * iteration.
     */
    final class BackedEntry implements Entry<K, V> {
        final long kRef;
        final long vRef;
        // Lazily initialized on first access
        boolean kInstantiated;
        K k;
        boolean vInstantiated;
        V v;
        BackedEntry(long kRef, long vRef) {
            this.kRef = kRef;
            this.vRef = vRef;
        }
        @Override
        public K getKey() {
            if (!kInstantiated) {
                kInstantiated = true;
                k = keyInstantiator.instantiate(kRef);
            }
            return k;
        }
        @Override
        public V getValue() {
            if (!vInstantiated) {
                vInstantiated = true;
                v = valueInstantiator.instantiate(vRef);
            }
            return v;
        }
        @Override
        public V setValue(V value) {
            throw new UnsupportedOperationException();
        }
        @Override
        public boolean equals(Object o) {
            if (!(o instanceof Map.Entry)) {
                return false;
            }
            Entry<?, ?> e = (Entry<?, ?>) o;
            return Objects.equals(getKey(), e.getKey()) && Objects.equals(getValue(), e.getValue());
        }
        @Override
        public int hashCode() {
            // Per the Map.Entry contract: key hash XOR value hash.
            K key = getKey();
            V value = getValue();
            return (key == null ? 0 : key.hashCode()) ^
                    (value == null ? 0 : value.hashCode());
        }
        public String toString() {
            return getKey() + "=" + getValue();
        }
    }
}
| 9,360 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/perfapi/HollowPerformanceAPI.java | /*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.perfapi;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.core.HollowDataset;
import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;
import com.netflix.hollow.core.schema.HollowSchema;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * A {@link HollowAPI} that maintains a mapping between type names and the integer
 * type identifiers used to build typed hollow references (see {@link Ref}).
 */
public class HollowPerformanceAPI extends HollowAPI {
    protected final PerfAPITypeIdentifiers types;

    public HollowPerformanceAPI(HollowDataAccess dataAccess) {
        super(dataAccess);
        this.types = new PerfAPITypeIdentifiers(dataAccess);
    }

    /** @return the type-name/index mapping for this dataset */
    public PerfAPITypeIdentifiers getTypeIdentifiers() {
        return types;
    }

    /**
     * Bidirectional mapping between a dataset's type names and their positional
     * index in the dataset's schema list.
     */
    public static class PerfAPITypeIdentifiers {
        private final String[] typeNames;
        private final Map<String, Integer> typeIdxMap;

        public PerfAPITypeIdentifiers(HollowDataset dataset) {
            List<HollowSchema> schemas = dataset.getSchemas();
            this.typeIdxMap = new HashMap<>();
            String[] typeNames = new String[schemas.size()];
            for (int i = 0; i < schemas.size(); i++) {
                typeNames[i] = schemas.get(i).getName();
                typeIdxMap.put(typeNames[i], i);
            }
            this.typeNames = typeNames;
        }

        /**
         * @return the index for {@code typeName}, or {@link Ref#TYPE_ABSENT} if the
         *         type is not present in the dataset
         */
        public int getIdx(String typeName) {
            Integer idx = typeIdxMap.get(typeName);
            if (idx == null) {
                return Ref.TYPE_ABSENT;
            }
            return idx;
        }

        /**
         * @return the type name at {@code idx}, or a diagnostic string when the index
         *         is out of range
         */
        public String getTypeName(int idx) {
            if (idx >= 0 && idx < typeNames.length) {
                return typeNames[idx];
            }
            return "INVALID (" + idx + ")";
        }
    }
}
| 9,361 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/perfapi/Ref.java | /*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.perfapi;
/**
* Utility methods on hollow references.
* <p>
* A hollow reference is a 64 bit pointer comprised of two parts.
* One part is an ordinal value.
* The other part is a type identifier associated with the ordinal.
* A hollow reference provides a degree of type safety such that a hollow reference can only be used to
* operate on a hollow object whose type corresponds to the reference's type identifier.
*/
/**
 * Utility methods on hollow references.
 * <p>
 * A hollow reference packs two pieces of information into a single 64 bit value:
 * the low 32 bits hold an ordinal, and bits 32-47 (see {@code TYPE_MASK}) hold a
 * type identifier associated with that ordinal. The pairing gives a degree of type
 * safety: a reference may only be used on a hollow object whose type matches the
 * reference's type identifier.
 */
public final class Ref {
    /** Sentinel type index meaning "type not present in the dataset". */
    public static final int TYPE_ABSENT = -1;

    // Bits 32-47 of a reference carry the type identifier.
    private static final long TYPE_MASK = 0x0000FFFF_00000000L;

    /** The null reference. */
    public static final long NULL = -1;

    private Ref() {
        // static utility class; never instantiated
    }

    /** @return true when {@code ref} is not the null reference */
    public static boolean isNonNull(long ref) {
        return !isNull(ref);
    }

    /** @return true when {@code ref} is the null reference */
    public static boolean isNull(long ref) {
        return ref == NULL;
    }

    /** @return true when the reference carries the given type identifier */
    public static boolean isRefOfType(int type, long ref) {
        long expectedMask = toTypeMasked(type);
        return isRefOfTypeMasked(expectedMask, ref);
    }

    /** @return true when the reference's masked type bits equal {@code typeMasked} */
    public static boolean isRefOfTypeMasked(long typeMasked, long ref) {
        return typeMasked(ref) == typeMasked;
    }

    /** @return the ordinal portion (low 32 bits) of the reference */
    public static int ordinal(long ref) {
        return (int) ref;
    }

    /** @return the upper 32 bits of the reference as the type portion */
    public static int type(long ref) {
        return (int) (ref >>> 32);
    }

    /** @return the reference with everything but its type bits cleared */
    public static long typeMasked(long ref) {
        return ref & TYPE_MASK;
    }

    /** Packs a type identifier and an ordinal into a reference. */
    public static long toRef(int type, int ordinal) {
        return toTypeMasked(type) | ordinal;
    }

    /** Packs an ordinal under pre-shifted type bits. */
    public static long toRefWithTypeMasked(long typeMasked, int ordinal) {
        // @@@ This erases the type
        return typeMasked | ordinal;
    }

    /** Shifts a type identifier into the type-bit positions (truncated to 16 bits). */
    public static long toTypeMasked(int type) {
        return ((long) type << 32) & TYPE_MASK;
    }
}
| 9,362 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/perfapi/HollowListTypePerfAPI.java | /*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.perfapi;
import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowListTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.missing.HollowListMissingDataAccess;
import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator;
import java.util.List;
/**
 * Performance-API access to a LIST type: element lookups return typed hollow
 * references (see {@link Ref}) rather than materialized objects.
 */
public class HollowListTypePerfAPI extends HollowTypePerfAPI {
    private final HollowListTypeDataAccess typeAccess;

    // Pre-shifted type bits of the list's element type, OR'd onto element ordinals.
    final long elementMaskedTypeIdx;

    public HollowListTypePerfAPI(HollowDataAccess dataAccess, String typeName, HollowPerformanceAPI api) {
        super(typeName, api);
        HollowListTypeDataAccess access = (HollowListTypeDataAccess) dataAccess.getTypeDataAccess(typeName);
        final int elementTypeIdx;
        if (access == null) {
            elementTypeIdx = Ref.TYPE_ABSENT;
        } else {
            elementTypeIdx = api.types.getIdx(access.getSchema().getElementType());
        }
        this.elementMaskedTypeIdx = Ref.toTypeMasked(elementTypeIdx);
        // Fall back to a missing-data access so calls on an absent type do not NPE.
        this.typeAccess = (access != null) ? access : new HollowListMissingDataAccess(dataAccess, typeName);
    }

    /** @return the number of elements in the list referenced by {@code ref} */
    public int size(long ref) {
        return typeAccess.size(ordinal(ref));
    }

    /** @return a typed reference to the element at index {@code idx} */
    public long get(long ref, int idx) {
        int elementOrdinal = typeAccess.getElementOrdinal(ordinal(ref), idx);
        return Ref.toRefWithTypeMasked(elementMaskedTypeIdx, elementOrdinal);
    }

    /** @return a one-shot iterator over typed element references */
    public HollowPerfReferenceIterator iterator(long ref) {
        return new HollowPerfReferenceIterator(typeAccess.ordinalIterator(ordinal(ref)), elementMaskedTypeIdx);
    }

    /** @return a lazily-materializing {@link List} view of the referenced list */
    public <T> List<T> backedList(long ref, POJOInstantiator<T> instantiator) {
        return new HollowPerfBackedList<>(this, ordinal(ref), instantiator);
    }

    public HollowListTypeDataAccess typeAccess() {
        return typeAccess;
    }
}
| 9,363 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/perfapi/HollowPerfAPICache.java | /*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.perfapi;
import com.netflix.hollow.core.read.engine.PopulatedOrdinalListener;
import java.util.Arrays;
import java.util.BitSet;
/**
 * An ordinal-indexed cache of instantiated POJOs for one type. On each state
 * transition a new cache is built from the previous one: instances for ordinals
 * populated in both cycles are carried over, instances for newly-populated ordinals
 * are created, and slots that are populated in neither cycle are nulled out.
 *
 * @param <T> the cached POJO type
 */
public class HollowPerfAPICache<T> {
    private static final Object[] EMPTY_CACHE = new Object[0];
    private final HollowTypePerfAPI typeAPI;
    // Indexed by ordinal; holes (null) where the ordinal is not populated.
    private final Object[] cachedItems;
    public HollowPerfAPICache(
            HollowTypePerfAPI typeAPI,
            POJOInstantiator<T> instantiator,
            HollowPerfAPICache<T> previous) {
        this.typeAPI = typeAPI;
        if(!typeAPI.isMissingType()) {
            PopulatedOrdinalListener listener = typeAPI.typeAccess().getTypeState()
                    .getListener(PopulatedOrdinalListener.class);
            BitSet populatedOrdinals = listener.getPopulatedOrdinals();
            BitSet previousOrdinals = listener.getPreviousOrdinals();
            // Size the array to cover every ordinal seen in either cycle.
            int length = Math.max(populatedOrdinals.length(), previousOrdinals.length());
            // Copy over all previously cached items, resizing the array if necessary.
            // This is required if removed ordinals are queried in the cache.
            // For example, see SpecificTypeUpdateNotifier.buildFastlaneUpdateNotificationLists
            Object[] arr = previous != null
                    ? Arrays.copyOf(previous.cachedItems, length)
                    : new Object[length];
            for (int ordinal = 0; ordinal < length; ordinal++) {
                boolean previouslyPopulated = previous != null && previousOrdinals.get(ordinal);
                if (!previouslyPopulated) {
                    // If not previously populated and currently populated then create a new cached instance.
                    // Otherwise, if not previously populated and not currently populated than null out any
                    // possibly present old cached value (create a hole)
                    boolean currentlyPopulated = populatedOrdinals.get(ordinal);
                    arr[ordinal] = currentlyPopulated
                            ? instantiator.instantiate(Ref.toRefWithTypeMasked(typeAPI.maskedTypeIdx, ordinal))
                            : null;
                }
                // If previously populated then retain the cached item
            }
            this.cachedItems = arr;
        } else {
            // Type absent from the dataset: nothing to cache.
            this.cachedItems = EMPTY_CACHE;
        }
    }
    /**
     * Returns the cached instance for the ordinal encoded in {@code ref}.
     * NOTE(review): an out-of-range ordinal throws ArrayIndexOutOfBoundsException and
     * an unpopulated ordinal yields null — callers are expected to pass valid refs.
     */
    public T get(long ref) {
        @SuppressWarnings("unchecked")
        T t = (T) cachedItems[typeAPI.ordinal(ref)];
        return t;
    }
    /** @return a defensive copy of the backing array (ordinal-indexed, may contain nulls) */
    public Object[] getCachedItems() {
        return Arrays.copyOf(cachedItems, cachedItems.length);
    }
}
| 9,364 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/perfapi/HollowObjectTypePerfAPI.java | /*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.perfapi;
import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.missing.HollowObjectMissingDataAccess;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import java.util.Arrays;
/**
 * Base class for performance-API access to an OBJECT type. Resolves the schema
 * positions of the requested fields up front, along with the pre-shifted type bits
 * of each REFERENCE field's target type.
 */
public abstract class HollowObjectTypePerfAPI extends HollowTypePerfAPI {
    protected final HollowObjectTypeDataAccess typeAccess;

    // Schema position per requested field; -1 when absent.
    protected final int[] fieldIdx;

    // Pre-shifted type bits per field; meaningful only for REFERENCE fields.
    protected final long[] refMaskedTypeIdx;

    public HollowObjectTypePerfAPI(HollowDataAccess dataAccess, String typeName, HollowPerformanceAPI api, String[] fieldNames) {
        super(typeName, api);
        HollowObjectTypeDataAccess typeAccess = (HollowObjectTypeDataAccess) dataAccess.getTypeDataAccess(typeName);
        this.fieldIdx = new int[fieldNames.length];
        this.refMaskedTypeIdx = new long[fieldNames.length];
        if (typeAccess == null) {
            // Type absent: mark every field missing.
            Arrays.fill(fieldIdx, -1);
            Arrays.fill(refMaskedTypeIdx, Ref.toTypeMasked(Ref.TYPE_ABSENT));
        } else {
            HollowObjectSchema schema = typeAccess.getSchema();
            for (int i = 0; i < fieldNames.length; i++) {
                int position = schema.getPosition(fieldNames[i]);
                fieldIdx[i] = position;
                if (position != -1 && schema.getFieldType(position) == FieldType.REFERENCE) {
                    refMaskedTypeIdx[i] = Ref.toTypeMasked(api.types.getIdx(schema.getReferencedType(position)));
                }
            }
        }
        // Fall back to a missing-data access so calls on an absent type do not NPE.
        if (typeAccess == null) {
            typeAccess = new HollowObjectMissingDataAccess(dataAccess, typeName);
        }
        this.typeAccess = typeAccess;
    }

    public HollowObjectTypeDataAccess typeAccess() {
        return typeAccess;
    }
}
| 9,365 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/perfapi/HollowSetTypePerfAPI.java | /*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.perfapi;
import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowSetTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.missing.HollowSetMissingDataAccess;
import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator;
import java.util.Set;
/**
 * Performance API for SET type records: exposes size, iteration, and hash-key
 * lookup over packed references, plus a {@link java.util.Set} view backed
 * directly by the underlying data.
 */
public class HollowSetTypePerfAPI extends HollowTypePerfAPI {
    // Data access for this type; never null (falls back to a "missing" implementation).
    private final HollowSetTypeDataAccess typeAccess;
    // Masked type index of the set's element type, pre-applied to returned references.
    final long elementMaskedTypeIdx;
    public HollowSetTypePerfAPI(HollowDataAccess dataAccess, String typeName, HollowPerformanceAPI api) {
        super(typeName, api);
        HollowSetTypeDataAccess typeAccess = (HollowSetTypeDataAccess) dataAccess.getTypeDataAccess(typeName);
        // If the type is absent from the dataset, mark the element type absent as well.
        int elementTypeIdx = typeAccess == null ? Ref.TYPE_ABSENT : api.types.getIdx(typeAccess.getSchema().getElementType());
        this.elementMaskedTypeIdx = Ref.toTypeMasked(elementTypeIdx);
        // Substitute a missing-data access so callers never need a null check.
        if(typeAccess == null)
            typeAccess = new HollowSetMissingDataAccess(dataAccess, typeName);
        this.typeAccess = typeAccess;
    }
    /** @return the number of elements in the set referenced by {@code ref} */
    public int size(long ref) {
        return typeAccess.size(ordinal(ref));
    }
    /** @return an iterator over the packed element references of the set */
    public HollowPerfReferenceIterator iterator(long ref) {
        HollowOrdinalIterator iter = typeAccess.ordinalIterator(ordinal(ref));
        return new HollowPerfReferenceIterator(iter, elementMaskedTypeIdx);
    }
    /**
     * Finds an element by hash key.
     *
     * @return the packed reference of the matching element; the ordinal portion is
     *         the not-found sentinel when no element matches
     */
    public long findElement(long ref, Object... hashKey) {
        int ordinal = typeAccess.findElement(ordinal(ref), hashKey);
        return Ref.toRefWithTypeMasked(elementMaskedTypeIdx, ordinal);
    }
    /**
     * @return a read-only {@link Set} view of the record; {@code hashKeyExtractor}
     *         may be null, in which case {@code contains} is unsupported
     */
    public <T> Set<T> backedSet(long ref, POJOInstantiator<T> instantiator, HashKeyExtractor hashKeyExtractor) {
        return new HollowPerfBackedSet<>(this, ref, instantiator, hashKeyExtractor);
    }
    /** @return the (never-null) data access for this SET type */
    public HollowSetTypeDataAccess typeAccess() {
        return typeAccess;
    }
}
| 9,366 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/perfapi/POJOInstantiator.java | /*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.perfapi;
/**
 * Factory that materializes a POJO from a packed hollow reference
 * (type index + ordinal). Used by the backed-collection views to create
 * elements lazily.
 *
 * @param <T> the POJO type produced
 */
@FunctionalInterface
public interface POJOInstantiator<T> {
    T instantiate(long ref);
}
| 9,367 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/perfapi/HollowTypePerfAPI.java | /*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.perfapi;
import com.netflix.hollow.core.read.dataaccess.HollowTypeDataAccess;
/**
 * Base class for all performance-API type handles. Stores the masked type index
 * for its type so ordinals can be packed into (and validated against) long
 * references cheaply.
 */
public abstract class HollowTypePerfAPI {
    private final HollowPerformanceAPI api;
    // This type's index, pre-shifted into the type portion of a packed reference.
    protected final long maskedTypeIdx;
    public HollowTypePerfAPI(String typeName, HollowPerformanceAPI api) {
        int typeIdx = api.types.getIdx(typeName);
        this.maskedTypeIdx = Ref.toTypeMasked(typeIdx);
        this.api = api;
    }
    /** @return the packed reference combining this type's mask with the given ordinal */
    public long refForOrdinal(int ordinal) {
        return Ref.toRefWithTypeMasked(maskedTypeIdx, ordinal);
    }
    /** @return the (never-null) data access for this type */
    public abstract HollowTypeDataAccess typeAccess();
    /** @return the owning performance API */
    public HollowPerformanceAPI api() {
        return api;
    }
    /**
     * Gets the ordinal of the reference and checks that the reference is of the correct type.
     * @param ref the reference
     * @return the ordinal
     * @throws IllegalArgumentException if the reference's type differs
     */
    public int ordinal(long ref) {
        if (!Ref.isRefOfTypeMasked(maskedTypeIdx, ref)) {
            String expectedType = api.types.getTypeName(Ref.type(maskedTypeIdx));
            // Distinguish a null reference from a mistyped one for clearer diagnostics.
            if (Ref.isNull(ref)) {
                throw new NullPointerException("Reference is null -- expected type " + expectedType);
            }
            String foundType = api.types.getTypeName(Ref.type(ref));
            throw new IllegalArgumentException("Wrong reference type -- expected type " + expectedType + " but ref was of type " + foundType);
        }
        return Ref.ordinal(ref);
    }
    /** @return true if this type was absent from the dataset at construction time */
    public boolean isMissingType() {
        return maskedTypeIdx == Ref.toTypeMasked(Ref.TYPE_ABSENT);
    }
}
| 9,368 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/perfapi/HollowPerfBackedList.java | /*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.perfapi;
import com.netflix.hollow.core.read.dataaccess.HollowListTypeDataAccess;
import java.util.AbstractList;
import java.util.RandomAccess;
/**
 * A random-access, read-only {@link java.util.List} view backed directly by a
 * hollow LIST record. Elements are materialized lazily on every
 * {@link #get(int)} call via the supplied {@link POJOInstantiator}.
 *
 * @param <T> the element POJO type
 */
public class HollowPerfBackedList<T> extends AbstractList<T> implements RandomAccess {

    private final HollowListTypeDataAccess access;
    private final int listOrdinal;
    private final long elementTypeMask;
    private final POJOInstantiator<T> elementInstantiator;

    public HollowPerfBackedList(HollowListTypePerfAPI typeAPI, int ordinal,
            POJOInstantiator<T> instantiator) {
        this.access = typeAPI.typeAccess();
        this.listOrdinal = ordinal;
        this.elementInstantiator = instantiator;
        this.elementTypeMask = typeAPI.elementMaskedTypeIdx;
    }

    /** Materializes the element at {@code index} from its packed reference. */
    @Override
    public T get(int index) {
        int elementOrdinal = access.getElementOrdinal(listOrdinal, index);
        return elementInstantiator.instantiate(elementTypeMask | elementOrdinal);
    }

    /** @return the number of elements in the backing LIST record */
    @Override
    public int size() {
        return access.size(listOrdinal);
    }
}
| 9,369 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/perfapi/HollowRef.java | /*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.perfapi;
public abstract class HollowRef {
protected final long ref;
protected HollowRef(long ref) {
this.ref = ref;
}
public long ref() {
return ref;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof HollowRef)) {
return false;
}
HollowRef hollowRef = (HollowRef) o;
return ref == hollowRef.ref;
}
@Override
public int hashCode() {
return Long.hashCode(ref);
}
}
| 9,370 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/perfapi/HollowPerfReferenceIterator.java | /*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.perfapi;
import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator;
/**
 * Iterates the elements of a hollow collection record, yielding packed
 * references (element type mask | ordinal) instead of materialized POJOs.
 * The first ordinal is fetched eagerly at construction so {@link #hasNext()}
 * is a simple sentinel comparison.
 */
public class HollowPerfReferenceIterator {

    private final long elementTypeMask;
    private final HollowOrdinalIterator ordinals;
    private int nextOrdinal;

    public HollowPerfReferenceIterator(HollowOrdinalIterator iter, long elementMaskedTypeIdx) {
        this.ordinals = iter;
        this.elementTypeMask = elementMaskedTypeIdx;
        this.nextOrdinal = iter.next(); // prime the first element
    }

    /** @return true if another element reference is available */
    public boolean hasNext() {
        return nextOrdinal != HollowOrdinalIterator.NO_MORE_ORDINALS;
    }

    /** @return the next packed element reference */
    public long next() {
        long packedRef = Ref.toRefWithTypeMasked(elementTypeMask, nextOrdinal);
        nextOrdinal = ordinals.next();
        return packedRef;
    }
}
| 9,371 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/perfapi/HashKeyExtractor.java | /*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.perfapi;
/**
 * Extracts the hash key used to locate an element inside a hollow SET or MAP
 * record (see e.g. {@code HollowPerfBackedSet#contains}).
 */
@FunctionalInterface
public interface HashKeyExtractor {
    /**
     * Extracts the hash key from the given object.
     *
     * @param extractFrom the object to derive a key from
     * @return a single key field, an {@code Object[]} of key fields, or
     *         {@code null} if no key can be derived
     */
    Object extract(Object extractFrom);
    /**
     * Normalizes the extracted key into an {@code Object[]}.
     *
     * <p>Previously this threw a {@link NullPointerException} when
     * {@link #extract(Object)} returned {@code null}; it now propagates
     * {@code null}, which callers already treat as "element absent".
     *
     * @return the key fields as an array, or {@code null} when no key exists
     */
    default Object[] extractArray(Object extractFrom) {
        Object obj = extract(extractFrom);
        if(obj == null) {
            return null;
        }
        if(obj.getClass().isArray()) {
            return (Object[])obj;
        }
        return new Object[] { obj };
    }
}
| 9,372 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/perfapi/HollowPerfBackedSet.java | /*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.perfapi;
import com.netflix.hollow.core.read.dataaccess.HollowSetTypeDataAccess;
import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator;
import java.util.AbstractSet;
import java.util.Iterator;
import java.util.NoSuchElementException;
/**
 * A read-only {@link java.util.Set} view backed directly by a hollow SET record.
 * Elements are materialized lazily during iteration; {@link #contains(Object)}
 * requires a {@link HashKeyExtractor} and is unsupported without one.
 *
 * @param <T> the element POJO type
 */
public class HollowPerfBackedSet<T> extends AbstractSet<T> {
    private final int ordinal;
    private final HollowSetTypeDataAccess dataAccess;
    // Pre-masked element type index, OR'ed with each element ordinal to form a packed ref.
    private final long elementMaskedTypeIdx;
    private final POJOInstantiator<T> instantiator;
    // May be null; only needed for contains().
    private final HashKeyExtractor hashKeyExtractor;
    public HollowPerfBackedSet(
            HollowSetTypePerfAPI typeApi,
            long ref,
            POJOInstantiator<T> instantiator,
            HashKeyExtractor hashKeyExtractor) {
        this.dataAccess = typeApi.typeAccess();
        this.ordinal = typeApi.ordinal(ref);
        this.instantiator = instantiator;
        this.elementMaskedTypeIdx = typeApi.elementMaskedTypeIdx;
        this.hashKeyExtractor = hashKeyExtractor;
    }
    /** Iterates the set, materializing each element from its packed reference. */
    @Override
    public Iterator<T> iterator() {
        HollowOrdinalIterator oi = dataAccess.ordinalIterator(ordinal);
        return new Iterator<T>() {
            // Next element ordinal, primed eagerly; sentinel marks exhaustion.
            int eo = oi.next();
            @Override public boolean hasNext() {
                return eo != HollowOrdinalIterator.NO_MORE_ORDINALS;
            }
            @Override public T next() {
                if (!hasNext()) {
                    throw new NoSuchElementException();
                }
                int o = eo;
                eo = oi.next();
                return instantiator.instantiate(elementMaskedTypeIdx | o);
            }
        };
    }
    /**
     * Membership test via hash-key lookup on the backing record.
     *
     * @throws UnsupportedOperationException if no {@link HashKeyExtractor} was supplied
     */
    @Override
    public boolean contains(Object o) {
        if(hashKeyExtractor == null)
            throw new UnsupportedOperationException();
        Object[] key = hashKeyExtractor.extractArray(o);
        if(key == null)
            return false;
        return dataAccess.findElement(ordinal, key) != -1;
    }
    /** @return the number of elements in the backing SET record */
    @Override
    public int size() {
        return dataAccess.size(ordinal);
    }
}
| 9,373 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/perfapi/HollowMapTypePerfAPI.java | /*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.perfapi;
import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowMapTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.missing.HollowMapMissingDataAccess;
import com.netflix.hollow.core.read.iterator.HollowMapEntryOrdinalIterator;
import java.util.Map;
/**
 * Performance API for MAP type records: exposes size, entry iteration, and
 * hash-key lookup of keys/values as packed references, plus a
 * {@link java.util.Map} view backed directly by the underlying data.
 */
public class HollowMapTypePerfAPI extends HollowTypePerfAPI {
    // Data access for this type; never null (falls back to a "missing" implementation).
    private final HollowMapTypeDataAccess typeAccess;
    // Masked type indexes of the map's key and value types, pre-applied to returned references.
    final long keyMaskedTypeIdx;
    final long valueMaskedTypeIdx;
    public HollowMapTypePerfAPI(HollowDataAccess dataAccess, String typeName, HollowPerformanceAPI api) {
        super(typeName, api);
        HollowMapTypeDataAccess typeAccess = (HollowMapTypeDataAccess) dataAccess.getTypeDataAccess(typeName);
        // If the type is absent from the dataset, mark key/value types absent as well.
        int keyTypeIdx = typeAccess == null ? Ref.TYPE_ABSENT : api.types.getIdx(typeAccess.getSchema().getKeyType());
        int valueTypeIdx = typeAccess == null ? Ref.TYPE_ABSENT : api.types.getIdx(typeAccess.getSchema().getValueType());
        this.keyMaskedTypeIdx = Ref.toTypeMasked(keyTypeIdx);
        this.valueMaskedTypeIdx = Ref.toTypeMasked(valueTypeIdx);
        // Substitute a missing-data access so callers never need a null check.
        if(typeAccess == null)
            typeAccess = new HollowMapMissingDataAccess(dataAccess, typeName);
        this.typeAccess = typeAccess;
    }
    /** @return the number of entries in the map referenced by {@code ref} */
    public int size(long ref) {
        return typeAccess.size(ordinal(ref));
    }
    /** @return an iterator over entries whose keys may hash to {@code hashCode} */
    public HollowPerfMapEntryIterator possibleMatchIter(long ref, int hashCode) {
        HollowMapEntryOrdinalIterator iter = typeAccess.potentialMatchOrdinalIterator(ordinal(ref), hashCode);
        return new HollowPerfMapEntryIterator(iter, keyMaskedTypeIdx, valueMaskedTypeIdx);
    }
    /** @return an iterator over all entries of the map referenced by {@code ref} */
    public HollowPerfMapEntryIterator iterator(long ref) {
        HollowMapEntryOrdinalIterator iter = typeAccess.ordinalIterator(ordinal(ref));
        return new HollowPerfMapEntryIterator(iter, keyMaskedTypeIdx, valueMaskedTypeIdx);
    }
    /**
     * Finds a key by hash key.
     *
     * @return the packed reference of the matching key; the ordinal portion is
     *         the not-found sentinel when no key matches
     */
    public long findKey(long ref, Object... hashKey) {
        int ordinal = typeAccess.findKey(ordinal(ref), hashKey);
        return Ref.toRefWithTypeMasked(keyMaskedTypeIdx, ordinal);
    }
    /**
     * Finds the value whose key matches the hash key.
     *
     * @return the packed reference of the matching value; the ordinal portion is
     *         the not-found sentinel when no key matches
     */
    public long findValue(long ref, Object... hashKey) {
        int ordinal = typeAccess.findValue(ordinal(ref), hashKey);
        return Ref.toRefWithTypeMasked(valueMaskedTypeIdx, ordinal);
    }
    /**
     * @return a read-only {@link Map} view of the record; {@code hashKeyExtractor}
     *         may be null, in which case key lookups are unsupported
     */
    public <K,V> Map<K,V> backedMap(long ref, POJOInstantiator<K> keyInstantiator, POJOInstantiator<V> valueInstantiator, HashKeyExtractor hashKeyExtractor) {
        return new HollowPerfBackedMap<K,V>(this, ordinal(ref), keyInstantiator, valueInstantiator, hashKeyExtractor);
    }
    /** @return the (never-null) data access for this MAP type */
    public HollowMapTypeDataAccess typeAccess() {
        return typeAccess;
    }
}
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/perfapi/HollowPerfMapEntryIterator.java | /*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.perfapi;
import com.netflix.hollow.core.read.iterator.HollowMapEntryOrdinalIterator;
/**
 * Cursor over the entries of a hollow MAP record. Each successful
 * {@link #next()} positions the cursor on an entry whose key and value are
 * exposed as packed references (type mask | ordinal).
 */
public class HollowPerfMapEntryIterator {

    private final HollowMapEntryOrdinalIterator entries;
    private final long keyTypeMask;
    private final long valueTypeMask;

    public HollowPerfMapEntryIterator(HollowMapEntryOrdinalIterator iter, long keyMaskedTypeIdx, long valueMaskedTypeIdx) {
        this.entries = iter;
        this.keyTypeMask = keyMaskedTypeIdx;
        this.valueTypeMask = valueMaskedTypeIdx;
    }

    /** Advances to the next entry; returns false when the map is exhausted. */
    public boolean next() {
        return entries.next();
    }

    /** @return the packed reference of the current entry's key */
    public long getKey() {
        return Ref.toRefWithTypeMasked(keyTypeMask, entries.getKey());
    }

    /** @return the packed reference of the current entry's value */
    public long getValue() {
        return Ref.toRefWithTypeMasked(valueTypeMask, entries.getValue());
    }
}
| 9,375 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/custom/HollowMapTypeAPI.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.custom;
import com.netflix.hollow.core.read.dataaccess.HollowMapTypeDataAccess;
import com.netflix.hollow.core.read.iterator.HollowMapEntryOrdinalIterator;
/**
* This is the Hollow Type API interface for MAP type records.
*
* @see HollowTypeAPI
*/
/**
 * This is the Hollow Type API interface for MAP type records, exposing entry
 * access and hash-key lookup by ordinal.
 *
 * @see HollowTypeAPI
 */
public class HollowMapTypeAPI extends HollowTypeAPI {

    public HollowMapTypeAPI(HollowAPI api, HollowMapTypeDataAccess typeDataAccess) {
        super(api, typeDataAccess);
    }

    /** @return the number of entries in the map at {@code ordinal} */
    public int size(int ordinal) {
        HollowMapTypeDataAccess access = getTypeDataAccess();
        return access.size(ordinal);
    }

    /** @return the value ordinal mapped to {@code keyOrdinal}, or a not-found sentinel */
    public int get(int ordinal, int keyOrdinal) {
        HollowMapTypeDataAccess access = getTypeDataAccess();
        return access.get(ordinal, keyOrdinal);
    }

    /** @return the value ordinal mapped to {@code keyOrdinal}, using a precomputed hash code */
    public int get(int ordinal, int keyOrdinal, int hashCode) {
        HollowMapTypeDataAccess access = getTypeDataAccess();
        return access.get(ordinal, keyOrdinal, hashCode);
    }

    /** @return the ordinal of the key matching {@code hashKey}, or a not-found sentinel */
    public int findKey(int ordinal, Object... hashKey) {
        HollowMapTypeDataAccess access = getTypeDataAccess();
        return access.findKey(ordinal, hashKey);
    }

    /** @return the ordinal of the value whose key matches {@code hashKey}, or a not-found sentinel */
    public int findValue(int ordinal, Object... hashKey) {
        HollowMapTypeDataAccess access = getTypeDataAccess();
        return access.findValue(ordinal, hashKey);
    }

    /** @return the key and value ordinals (packed into a long) for the entry matching {@code hashKey} */
    public long findEntry(int ordinal, Object... hashKey) {
        HollowMapTypeDataAccess access = getTypeDataAccess();
        return access.findEntry(ordinal, hashKey);
    }

    /** @return an iterator over all entry ordinals of the map at {@code ordinal} */
    public HollowMapEntryOrdinalIterator getOrdinalIterator(int ordinal) {
        HollowMapTypeDataAccess access = getTypeDataAccess();
        return access.ordinalIterator(ordinal);
    }

    /** @return an iterator over entry ordinals whose keys may hash to {@code hashCode} */
    public HollowMapEntryOrdinalIterator potentialMatchOrdinalIterator(int ordinal, int hashCode) {
        HollowMapTypeDataAccess access = getTypeDataAccess();
        return access.potentialMatchOrdinalIterator(ordinal, hashCode);
    }

    @Override
    public HollowMapTypeDataAccess getTypeDataAccess() {
        return (HollowMapTypeDataAccess) typeDataAccess;
    }
}
| 9,376 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/custom/HollowAPI.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.custom;
import com.netflix.hollow.api.codegen.HollowAPIGenerator;
import com.netflix.hollow.api.sampling.HollowSamplingDirector;
import com.netflix.hollow.api.sampling.SampleResult;
import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;
import com.netflix.hollow.core.read.filter.HollowFilterConfig;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* A HollowAPI wraps a HollowDataAccess. This is the parent class of any Generated Hollow API.
*
* Generated Hollow APIs are created via the {@link HollowAPIGenerator}.
*/
/**
 * A HollowAPI wraps a {@link HollowDataAccess} and is the parent class of every
 * generated Hollow API. It tracks the per-type APIs registered with it and fans
 * sampling configuration out to each of them.
 *
 * Generated Hollow APIs are created via the {@link HollowAPIGenerator}.
 */
public class HollowAPI {

    private final HollowDataAccess dataAccess;
    private final List<HollowTypeAPI> typeAPIs;
    protected HollowSamplingDirector samplingDirector;

    public HollowAPI(HollowDataAccess dataAccess) {
        this.dataAccess = dataAccess;
        this.typeAPIs = new ArrayList<>();
    }

    /** @return the underlying data access this API reads from */
    public HollowDataAccess getDataAccess() {
        return dataAccess;
    }

    /** @return the sampling director currently in effect, if any */
    public HollowSamplingDirector getSamplingDirector() {
        return samplingDirector;
    }

    /** Installs the sampling director on this API and on every registered type API. */
    public void setSamplingDirector(HollowSamplingDirector samplingDirector) {
        this.samplingDirector = samplingDirector;
        for (HollowTypeAPI registered : typeAPIs) {
            registered.setSamplingDirector(samplingDirector);
        }
    }

    /** Installs a sampling director that applies only to the fields selected by {@code fieldSpec}. */
    public void setFieldSpecificSamplingDirector(HollowFilterConfig fieldSpec, HollowSamplingDirector director) {
        for (HollowTypeAPI registered : typeAPIs) {
            registered.setFieldSpecificSamplingDirector(fieldSpec, director);
        }
    }

    /** Excludes the given (update) thread from access sampling on every registered type API. */
    public void ignoreUpdateThreadForSampling(Thread t) {
        for (HollowTypeAPI registered : typeAPIs) {
            registered.ignoreUpdateThreadForSampling(t);
        }
    }

    /** @return access sample results aggregated across all registered type APIs, sorted */
    public List<SampleResult> getAccessSampleResults() {
        List<SampleResult> results = new ArrayList<>();
        for (HollowTypeAPI registered : typeAPIs) {
            results.addAll(registered.getAccessSampleResults());
        }
        Collections.sort(results);
        return results;
    }

    /** @return boxed-field access sample results aggregated across all OBJECT type APIs, sorted */
    public List<SampleResult> getBoxedSampleResults() {
        List<SampleResult> results = new ArrayList<>();
        for (HollowTypeAPI registered : typeAPIs) {
            if (registered instanceof HollowObjectTypeAPI) {
                HollowObjectTypeAPI objectTypeAPI = (HollowObjectTypeAPI) registered;
                results.addAll(objectTypeAPI.getBoxedFieldAccessSampler().getSampleResults());
            }
        }
        Collections.sort(results);
        return results;
    }

    /** Hook for generated subclasses that maintain caches; the base implementation does nothing. */
    public void detachCaches() { }

    /** Registers a per-type API so it participates in sampling configuration and result collection. */
    protected void addTypeAPI(HollowTypeAPI typeAPI) {
        this.typeAPIs.add(typeAPI);
    }
}
| 9,377 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/custom/HollowTypeAPI.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.custom;
import com.netflix.hollow.api.sampling.HollowSamplingDirector;
import com.netflix.hollow.api.sampling.SampleResult;
import com.netflix.hollow.core.read.dataaccess.HollowTypeDataAccess;
import com.netflix.hollow.core.read.filter.HollowFilterConfig;
import java.util.Collection;
/**
* A Hollow Type API provides methods for accessing data in Hollow records without creating
* wrapper objects as handles. Instead, the ordinals can be used directly as handles to the data.
* <p>
* This can be useful in tight loops, where the excess object creation incurred by using a Generated or Generic
* Hollow Object API would be prohibitively expensive.
*/
/**
 * A Hollow Type API provides methods for accessing data in Hollow records without creating
 * wrapper objects as handles. Instead, the ordinals can be used directly as handles to the data.
 * <p>
 * This can be useful in tight loops, where the excess object creation incurred by using a Generated or Generic
 * Hollow Object API would be prohibitively expensive.
 */
public abstract class HollowTypeAPI {
    // The owning top-level API.
    protected final HollowAPI api;
    // The data access for this type's records; all methods delegate to it.
    protected final HollowTypeDataAccess typeDataAccess;
    protected HollowTypeAPI(HollowAPI api, HollowTypeDataAccess typeDataAccess) {
        this.api = api;
        this.typeDataAccess = typeDataAccess;
    }
    /** @return the top-level API this type API belongs to */
    public HollowAPI getAPI() {
        return api;
    }
    /** @return the data access for this type's records */
    public HollowTypeDataAccess getTypeDataAccess() {
        return typeDataAccess;
    }
    /** Routes the sampling director to this type's data access. */
    public void setSamplingDirector(HollowSamplingDirector samplingDirector) {
        typeDataAccess.setSamplingDirector(samplingDirector);
    }
    /** Routes a field-specific sampling director to this type's data access. */
    public void setFieldSpecificSamplingDirector(HollowFilterConfig fieldSpec, HollowSamplingDirector director) {
        typeDataAccess.setFieldSpecificSamplingDirector(fieldSpec, director);
    }
    /** Excludes the given thread from access sampling for this type. */
    public void ignoreUpdateThreadForSampling(Thread t) {
        typeDataAccess.ignoreUpdateThreadForSampling(t);
    }
    /** @return the access sample results recorded for this type */
    public Collection<SampleResult> getAccessSampleResults() {
        return typeDataAccess.getSampler().getSampleResults();
    }
}
| 9,378 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/custom/HollowObjectTypeAPI.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.custom;
import com.netflix.hollow.api.sampling.DisabledSamplingDirector;
import com.netflix.hollow.api.sampling.HollowObjectSampler;
import com.netflix.hollow.api.sampling.HollowSampler;
import com.netflix.hollow.api.sampling.HollowSamplingDirector;
import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;
import com.netflix.hollow.core.read.dataaccess.HollowObjectTypeDataAccess;
import com.netflix.hollow.core.read.dataaccess.missing.HollowObjectMissingDataAccess;
import com.netflix.hollow.core.read.filter.HollowFilterConfig;
import com.netflix.hollow.core.read.missing.MissingDataHandler;
import com.netflix.hollow.core.schema.HollowObjectSchema;
import java.util.Arrays;
/**
* This is the Hollow Type API interface for OBJECT type records.
* <p>
* In a Generated Hollow API, this will be extended for each OBJECT type with specific methods to retrieve each field.
*
* @see HollowTypeAPI
*/
/**
 * This is the Hollow Type API interface for OBJECT type records.
 * <p>
 * In a Generated Hollow API, this will be extended for each OBJECT type with specific methods to retrieve each field.
 *
 * @see HollowTypeAPI
 */
public abstract class HollowObjectTypeAPI extends HollowTypeAPI {
    // Field names this API accesses, in index order.
    protected final String fieldNames[];
    // fieldIndex[i] = schema position of fieldNames[i], or -1 if the type/field is missing.
    protected final int fieldIndex[];
    // Sampler for boxed field accesses; NULL_SAMPLER when the type is missing from the dataset.
    protected final HollowObjectSampler boxedFieldAccessSampler;
    protected HollowObjectTypeAPI(HollowAPI api, HollowObjectTypeDataAccess typeDataAccess, String fieldNames[]) {
        super(api, typeDataAccess);
        this.fieldNames = fieldNames;
        this.fieldIndex = new int[fieldNames.length];
        HollowObjectSampler boxedFieldAccessSampler = HollowObjectSampler.NULL_SAMPLER;
        if(!(typeDataAccess instanceof HollowObjectMissingDataAccess)) {
            // Type is present: resolve each field's schema position up front.
            HollowObjectSchema schema = typeDataAccess.getSchema();
            for(int i=0;i<fieldNames.length;i++) {
                int fieldPosition = schema.getPosition(fieldNames[i]);
                fieldIndex[i] = fieldPosition;
            }
            boxedFieldAccessSampler = new HollowObjectSampler(schema, DisabledSamplingDirector.INSTANCE);
        } else {
            // Type missing from the dataset: mark all fields absent.
            Arrays.fill(fieldIndex, -1);
        }
        this.boxedFieldAccessSampler = boxedFieldAccessSampler;
    }
    @Override
    public HollowObjectTypeDataAccess getTypeDataAccess() {
        return (HollowObjectTypeDataAccess) typeDataAccess;
    }
    /** @return the dataset-wide data access backing this type */
    public HollowDataAccess getDataAccess() {
        return typeDataAccess.getDataAccess();
    }
    /** @return the sampler recording boxed field accesses for this type */
    public HollowSampler getBoxedFieldAccessSampler() {
        return boxedFieldAccessSampler;
    }
    @Override
    public void setSamplingDirector(HollowSamplingDirector samplingDirector) {
        super.setSamplingDirector(samplingDirector);
        boxedFieldAccessSampler.setSamplingDirector(samplingDirector);
    }
    @Override
    public void setFieldSpecificSamplingDirector(HollowFilterConfig fieldSpec, HollowSamplingDirector director) {
        super.setFieldSpecificSamplingDirector(fieldSpec, director);
        boxedFieldAccessSampler.setFieldSpecificSamplingDirector(fieldSpec, director);
    }
    @Override
    public void ignoreUpdateThreadForSampling(Thread t) {
        super.ignoreUpdateThreadForSampling(t);
        boxedFieldAccessSampler.setUpdateThread(t);
    }
    /** @return the handler invoked for fields/types absent from the dataset */
    protected MissingDataHandler missingDataHandler() {
        return api.getDataAccess().getMissingDataHandler();
    }
}
| 9,379 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/custom/HollowSetTypeAPI.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.custom;
import com.netflix.hollow.core.read.dataaccess.HollowSetTypeDataAccess;
import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator;
/**
* This is the Hollow Type API interface for SET type records.
*
* @see HollowTypeAPI
*/
/**
 * This is the Hollow Type API interface for SET type records, exposing
 * membership tests and element iteration by ordinal.
 *
 * @see HollowTypeAPI
 */
public class HollowSetTypeAPI extends HollowTypeAPI {

    public HollowSetTypeAPI(HollowAPI api, HollowSetTypeDataAccess typeDataAccess) {
        super(api, typeDataAccess);
    }

    /** @return the number of elements in the set at {@code ordinal} */
    public int size(int ordinal) {
        HollowSetTypeDataAccess access = getTypeDataAccess();
        return access.size(ordinal);
    }

    /** @return true if the set at {@code ordinal} contains the element ordinal {@code value} */
    public boolean contains(int ordinal, int value) {
        HollowSetTypeDataAccess access = getTypeDataAccess();
        return access.contains(ordinal, value);
    }

    /** @return true if the set contains {@code value}, using a precomputed hash code */
    public boolean contains(int ordinal, int value, int hashCode) {
        HollowSetTypeDataAccess access = getTypeDataAccess();
        return access.contains(ordinal, value, hashCode);
    }

    /** @return the ordinal of the element matching {@code hashKey}, or a not-found sentinel */
    public int findElement(int ordinal, Object... hashKey) {
        HollowSetTypeDataAccess access = getTypeDataAccess();
        return access.findElement(ordinal, hashKey);
    }

    /** @return an iterator over element ordinals that may hash to {@code hashCode} */
    public HollowOrdinalIterator potentialMatchOrdinalIterator(int ordinal, int hashCode) {
        HollowSetTypeDataAccess access = getTypeDataAccess();
        return access.potentialMatchOrdinalIterator(ordinal, hashCode);
    }

    /** @return an iterator over all element ordinals of the set at {@code ordinal} */
    public HollowOrdinalIterator getOrdinalIterator(int ordinal) {
        HollowSetTypeDataAccess access = getTypeDataAccess();
        return access.ordinalIterator(ordinal);
    }

    @Override
    public HollowSetTypeDataAccess getTypeDataAccess() {
        return (HollowSetTypeDataAccess) typeDataAccess;
    }
}
| 9,380 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/custom/HollowListTypeAPI.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.custom;
import com.netflix.hollow.core.read.dataaccess.HollowListTypeDataAccess;
import com.netflix.hollow.core.read.iterator.HollowOrdinalIterator;
/**
* This is the Hollow Type API interface for LIST type records.
*
* @see HollowTypeAPI
*/
public class HollowListTypeAPI extends HollowTypeAPI {

    public HollowListTypeAPI(HollowAPI api, HollowListTypeDataAccess typeDataAccess) {
        super(api, typeDataAccess);
    }

    /** Returns the number of elements in the list record at the given ordinal. */
    public int size(int ordinal) {
        HollowListTypeDataAccess dataAccess = getTypeDataAccess();
        return dataAccess.size(ordinal);
    }

    /** Returns the ordinal of the element at position {@code listIdx} in the list at {@code ordinal}. */
    public int getElementOrdinal(int ordinal, int listIdx) {
        HollowListTypeDataAccess dataAccess = getTypeDataAccess();
        return dataAccess.getElementOrdinal(ordinal, listIdx);
    }

    /** Iterates over all element ordinals of the list at {@code ordinal}, in list order. */
    public HollowOrdinalIterator getOrdinalIterator(int ordinal) {
        HollowListTypeDataAccess dataAccess = getTypeDataAccess();
        return dataAccess.ordinalIterator(ordinal);
    }

    @Override
    public HollowListTypeDataAccess getTypeDataAccess() {
        return (HollowListTypeDataAccess) typeDataAccess;
    }
}
| 9,381 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/error/IncompatibleSchemaException.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.error;
/**
* An exception thrown when trying to compare two versions of an object with incompatible schema.
*/
public class IncompatibleSchemaException extends HollowException {

    private final String typeName;
    private final String fieldName;
    private final String fieldType;
    private final String otherType;

    /**
     * @param typeName  the type whose schemas could not be reconciled
     * @param fieldName the field with mismatched types
     * @param fieldType the field's type in one schema
     * @param otherType the field's type in the other schema
     */
    public IncompatibleSchemaException(String typeName, String fieldName, String fieldType,
            String otherType) {
        super("No common schema exists for " + typeName + ": field " + fieldName
                + " has unmatched types: " + fieldType + " vs " + otherType);
        this.typeName = typeName;
        this.fieldName = fieldName;
        this.fieldType = fieldType;
        this.otherType = otherType;
    }

    public String getTypeName() {
        return typeName;
    }

    public String getFieldName() {
        return fieldName;
    }

    public String getFieldType() {
        return fieldType;
    }

    public String getOtherType() {
        return otherType;
    }
}
| 9,382 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/error/SchemaNotFoundException.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.error;
import java.util.Collection;
/**
* An exception thrown when trying to use a schema that does not exist.
*/
public class SchemaNotFoundException extends HollowException {

    private final String typeName;
    private final Collection<String> availableTypes;

    /**
     * @param typeName       the type whose schema could not be found
     * @param availableTypes the type names that do have schemas, included in the message
     */
    public SchemaNotFoundException(String typeName, Collection<String> availableTypes) {
        super("Could not find schema for " + typeName + " - " + getMessageSuffix(availableTypes));
        this.typeName = typeName;
        this.availableTypes = availableTypes;
    }

    public String getTypeName() {
        return typeName;
    }

    public Collection<String> getAvailableTypes() {
        return availableTypes;
    }

    // Describes the available schemas, or explains why none exist.
    private static String getMessageSuffix(Collection<String> availableTypes) {
        return availableTypes.isEmpty()
                ? "empty type state, make sure your namespace has published versions"
                : "available schemas: " + availableTypes;
    }
}
| 9,383 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/error/HollowException.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.error;
/**
* A generic exception thrown by hollow. In most cases, a subclass of this exception is thrown.
*/
public class HollowException extends RuntimeException {
    /** @param message detail message describing the failure */
    public HollowException(String message) {
        super(message);
    }
    /**
     * @param message detail message describing the failure
     * @param cause the underlying cause, preserved for stack traces
     */
    public HollowException(String message, Throwable cause) {
        super(message, cause);
    }
    /** @param cause the underlying cause; its string form becomes the message */
    public HollowException(Throwable cause) {
        super(cause);
    }
}
| 9,384 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/error/HollowWriteStateException.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.error;
/**
* An exception thrown when the write state is unable to advance, revert, or otherwise fails.
*/
public class HollowWriteStateException extends HollowException {
    /** @param message detail message describing the write-state failure */
    public HollowWriteStateException(String message) {
        super(message);
    }
    /**
     * @param message detail message describing the write-state failure
     * @param cause the underlying cause, preserved for stack traces
     */
    public HollowWriteStateException(String message, Throwable cause) {
        super(message, cause);
    }
    /** @param cause the underlying cause; its string form becomes the message */
    public HollowWriteStateException(Throwable cause) {
        super(cause);
    }
}
| 9,385 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/client/FailedTransitionTracker.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.client;
import com.netflix.hollow.api.consumer.HollowConsumer;
import java.util.HashSet;
/**
* Tracks the blobs which failed to be successfully applied by a HollowClient. Blobs logged in this
* tracker will not be attempted again.
*/
public class FailedTransitionTracker {

    /** Destination versions of snapshot transitions which failed to apply. */
    private final HashSet<Long> failedSnapshotTransitions;
    /** (from, to) version pairs of delta transitions which failed to apply. */
    private final HashSet<DeltaTransition> failedDeltaTransitions;

    public FailedTransitionTracker() {
        this.failedSnapshotTransitions = new HashSet<>();
        this.failedDeltaTransitions = new HashSet<>();
    }

    /** Marks every transition in the given plan as failed. */
    public void markAllTransitionsAsFailed(HollowUpdatePlan plan) {
        for (HollowConsumer.Blob transition : plan)
            markFailedTransition(transition);
    }

    /**
     * Records a single failed transition. Snapshots are keyed by destination version;
     * deltas by their (from, to) version pair.
     */
    public void markFailedTransition(HollowConsumer.Blob transition) {
        if (transition.isSnapshot()) {
            failedSnapshotTransitions.add(transition.getToVersion());
        } else {
            failedDeltaTransitions.add(delta(transition));
        }
    }

    /** Returns true if any transition in the plan was previously recorded as failed. */
    public boolean anyTransitionWasFailed(HollowUpdatePlan plan) {
        for (HollowConsumer.Blob transition : plan) {
            if (transitionWasFailed(transition))
                return true;
        }
        return false;
    }

    /**
     * @return the number of failed snapshot transitions.
     */
    public int getNumFailedSnapshotTransitions() {
        return this.failedSnapshotTransitions.size();
    }

    /**
     * @return the number of failed delta transitions.
     */
    public int getNumFailedDeltaTransitions() {
        return this.failedDeltaTransitions.size();
    }

    /**
     * Clear all failing transitions.
     */
    public void clear() {
        failedSnapshotTransitions.clear();
        failedDeltaTransitions.clear();
    }

    private boolean transitionWasFailed(HollowConsumer.Blob transition) {
        if (transition.isSnapshot())
            return failedSnapshotTransitions.contains(transition.getToVersion());
        return failedDeltaTransitions.contains(delta(transition));
    }

    private DeltaTransition delta(HollowConsumer.Blob transition) {
        return new DeltaTransition(transition.getFromVersion(), transition.getToVersion());
    }

    /**
     * Immutable (from, to) version-pair key for the failed-delta set.
     * Declared static so instances do not retain a hidden reference to the
     * enclosing FailedTransitionTracker (Effective Java: prefer static member classes).
     */
    private static final class DeltaTransition {
        private final long fromState;
        private final long toState;

        DeltaTransition(long fromState, long toState) {
            this.fromState = fromState;
            this.toState = toState;
        }

        @Override
        public int hashCode() {
            // Any deterministic mix of both versions suffices; keys are in-memory only.
            return Long.hashCode(fromState) ^ Long.hashCode(toState);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj instanceof DeltaTransition) {
                DeltaTransition other = (DeltaTransition) obj;
                return other.fromState == fromState && other.toState == toState;
            }
            return false;
        }
    }
}
| 9,386 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/client/StaleHollowReferenceDetector.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.client;
import static com.netflix.hollow.core.util.Threads.daemonThread;
import com.netflix.hollow.api.consumer.HollowConsumer;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.api.sampling.EnabledSamplingDirector;
import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;
import com.netflix.hollow.core.read.dataaccess.proxy.HollowProxyDataAccess;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.tools.history.HollowHistoricalStateDataAccess;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/**
* Detect stale Hollow references and USAGE of stale hollow references.
*
* When obtaining a reference to a hollow object, this reference is not intended to be held on to indefinitely. This
* class detects whether references are held and/or used beyond some expected lifetime.
*
* If objects are detected as held beyond some grace period but not used beyond that period, then they will be detached,
* so they do not hang on to the entire historical data store beyond some length of time.
*
* This class is also responsible for notifying the HollowUpdateListener if stale references or usage is detected.
*/
public class StaleHollowReferenceDetector {
    /// Every HOUSEKEEPING_INTERVAL, in milliseconds, check to see whether
    /// a) We should transition from the GRACE_PERIOD to the DISABLE_TEST_PERIOD
    /// b) any object in the DISABLE_TEST_PERIOD has been accessed
    /// c) any hollow objects are referenced which we expect to be unreferenced
    /// and do the appropriate disabling / send the appropriate signals to the update listener.
    private static final long HOUSEKEEPING_INTERVAL = 30000L;
    // Shared director used to re-enable sampling when a handle enters its usage-detection period.
    private static final EnabledSamplingDirector ENABLED_SAMPLING_DIRECTOR = new EnabledSamplingDirector();
    // One handle per HollowAPI handed out; access guarded by this class's synchronized methods.
    private final List<HollowWeakReferenceHandle> handles;
    private final HollowConsumer.ObjectLongevityConfig config;
    private final HollowConsumer.ObjectLongevityDetector detector;
    private final StackTraceRecorder stackTraceRecorder;
    // NOTE(review): this field is checked in startMonitoring() but never assigned, so
    // repeated calls would each start a new monitor thread — confirm callers invoke it once.
    private Thread monitor;

    public StaleHollowReferenceDetector(HollowConsumer.ObjectLongevityConfig config, HollowConsumer.ObjectLongevityDetector detector) {
        this.handles = new ArrayList<HollowWeakReferenceHandle>();
        this.config = config;
        this.detector = detector;
        this.stackTraceRecorder = new StackTraceRecorder(25);
    }

    /** Returns true if a handle is already tracking the given API instance. */
    synchronized boolean isKnownAPIHandle(HollowAPI api) {
        for(HollowWeakReferenceHandle handle : handles)
            if(handle.isAPIHandled(api))
                return true;
        return false;
    }

    /**
     * Begins tracking a newly-created API. Existing handles are notified so they can
     * start their grace period once a genuinely newer data holder supersedes them.
     */
    synchronized void newAPIHandle(HollowAPI api) {
        for(HollowWeakReferenceHandle handle : handles)
            handle.newAPIAvailable(api);
        handles.add(new HollowWeakReferenceHandle(api));
    }

    // Counts handles whose API is still strongly reachable past the expected lifetime
    // (the weakly-held sibling has been collected but the API has not).
    private synchronized int countStaleReferenceExistenceSignals() {
        int signals = 0;
        for(HollowWeakReferenceHandle handle : handles) {
            if(handle.isExistingStaleReferenceHint())
                signals++;
        }
        return signals;
    }

    // Counts handles whose API has actually been USED (sample results observed)
    // since sampling was reset at the start of the usage-detection period.
    private synchronized int countStaleReferenceUsageSignals() {
        int signals = 0;
        for(HollowWeakReferenceHandle handle : handles) {
            if(handle.hasBeenUsedSinceReset())
                signals++;
        }
        return signals;
    }

    // Lets each handle advance its lifecycle, and drops handles whose API has been collected.
    private synchronized void housekeeping() {
        Iterator<HollowWeakReferenceHandle> iter = handles.iterator();
        while(iter.hasNext()) {
            HollowWeakReferenceHandle handle = iter.next();
            handle.housekeeping();
            if(handle.isFinished())
                iter.remove();
        }
    }

    /** Starts the background housekeeping daemon thread (intended to be called once). */
    public void startMonitoring() {
        if (monitor == null) {
            daemonThread(new Monitor(this), getClass(), "monitor")
                    .start();
        }
    }

    /** Recorder capturing stack traces of accesses to expired (historical) data. */
    public StackTraceRecorder getStaleReferenceStackTraceRecorder() {
        return stackTraceRecorder;
    }

    /**
     * Housekeeping loop. Holds only a WeakReference to the detector so the monitor
     * thread does not prevent the detector itself from being garbage collected;
     * the loop exits once the detector is collected.
     */
    private static class Monitor implements Runnable {
        private final WeakReference<StaleHollowReferenceDetector> ref;

        Monitor(StaleHollowReferenceDetector parent) {
            this.ref = new WeakReference<>(parent);
        }

        public void run() {
            while (ref.get() != null) {
                try {
                    Thread.sleep(HOUSEKEEPING_INTERVAL);
                } catch (InterruptedException e) {
                    // restore the interrupt flag; the loop condition decides termination
                    Thread.currentThread().interrupt();
                }
                StaleHollowReferenceDetector parent = ref.get();
                if (parent != null) {
                    parent.housekeeping();
                    parent.detector.staleReferenceExistenceDetected(parent.countStaleReferenceExistenceSignals());
                    parent.detector.staleReferenceUsageDetected(parent.countStaleReferenceUsageSignals());
                }
            }
        }
    }

    /**
     * Tracks the lifecycle of one handed-out HollowAPI:
     * grace period -> usage-detection period -> (optional) detach.
     * The "sibling" object is weakly observable but strongly held until the
     * usage-detection period begins; once the strong reference is dropped, the
     * sibling's collection indicates the API is only reachable via client code.
     */
    private class HollowWeakReferenceHandle {
        private final WeakReference<HollowAPI> apiHandle;
        private final WeakReference<Object> siblingHandle;
        // Long.MAX_VALUE means the grace period has not started yet.
        private long gracePeriodBeginTimestamp = Long.MAX_VALUE;
        private Object sibling;
        private boolean usageDetected;
        private boolean detached;

        private HollowWeakReferenceHandle(HollowAPI stateEngine) {
            this.apiHandle = new WeakReference<HollowAPI>(stateEngine);
            this.sibling = new Object();
            this.siblingHandle = new WeakReference<Object>(sibling);
        }

        // Finished once the API itself has been garbage collected.
        private boolean isFinished() {
            return !stateEngineIsReachable();
        }

        // API still reachable while our sibling was collected: someone else holds the API.
        private boolean isExistingStaleReferenceHint() {
            return stateEngineIsReachable() && !siblingIsReachable();
        }

        // Only meaningful after beginUsageDetectionPeriod() cleared the sibling and reset sampling.
        private boolean hasBeenUsedSinceReset() {
            if(sibling == null) {
                HollowAPI myAPI = apiHandle.get();
                if(myAPI != null)
                    return myAPI.getDataAccess().hasSampleResults();
            }
            return false;
        }

        // Advances this handle's lifecycle; no-op until the grace period has begun.
        private void housekeeping() {
            if(gracePeriodBeginTimestamp != Long.MAX_VALUE) {
                if(shouldBeginUsageDetectionPeriod())
                    beginUsageDetectionPeriod();
                if(shouldDetach())
                    detach();
                setUpStackTraceRecording();
            }
        }

        // Detach after grace + usage-detection periods elapse, depending on config:
        // forceDropData always detaches; dropDataAutomatically detaches only when no usage was seen.
        private boolean shouldDetach() {
            if(!detached && System.currentTimeMillis() > (gracePeriodBeginTimestamp + config.gracePeriodMillis() + config.usageDetectionPeriodMillis())) {
                if(config.forceDropData()) {
                    return true;
                } else if(config.dropDataAutomatically()) {
                    if(usageDetected)
                        return false;
                    HollowAPI api = apiHandle.get();
                    if(api != null) {
                        HollowDataAccess dataAccess = api.getDataAccess();
                        if(dataAccess.hasSampleResults()) {
                            // usage observed: remember it and keep the data attached
                            usageDetected = true;
                            return false;
                        }
                        return true;
                    }
                }
            }
            return false;
        }

        // Severs the API from its underlying data so stale references cannot pin
        // historical state in memory.
        private void detach() {
            HollowAPI api = apiHandle.get();
            if(api != null) {
                HollowDataAccess dataAccess = api.getDataAccess();
                if (dataAccess instanceof HollowProxyDataAccess)
                    ((HollowProxyDataAccess) dataAccess).disableDataAccess();
                else if (dataAccess instanceof HollowReadStateEngine)
                    ((HollowReadStateEngine) dataAccess).invalidate();
                api.detachCaches();
            }
            detached = true;
        }

        private boolean shouldBeginUsageDetectionPeriod() {
            return sibling != null && System.currentTimeMillis() > (gracePeriodBeginTimestamp + config.gracePeriodMillis());
        }

        // Drops the strong sibling reference (so its collection can be observed) and
        // resets + enables sampling so any further API use becomes visible.
        private void beginUsageDetectionPeriod() {
            sibling = null;
            HollowAPI hollowAPI = apiHandle.get();
            if(hollowAPI != null) {
                hollowAPI.getDataAccess().resetSampling();
                hollowAPI.setSamplingDirector(ENABLED_SAMPLING_DIRECTOR);
            }
        }

        // Wires the shared stack trace recorder into historical data access, when configured.
        private void setUpStackTraceRecording() {
            HollowAPI api = apiHandle.get();
            if(api != null) {
                HollowDataAccess dataAccess = api.getDataAccess();
                if(dataAccess instanceof HollowProxyDataAccess) {
                    HollowDataAccess proxiedDataAccess = ((HollowProxyDataAccess) dataAccess).getProxiedDataAccess();
                    if(proxiedDataAccess instanceof HollowHistoricalStateDataAccess)
                        ((HollowHistoricalStateDataAccess)proxiedDataAccess).setStackTraceRecorder(config.enableExpiredUsageStackTraces() ? stackTraceRecorder : null);
                }
            }
        }

        private boolean stateEngineIsReachable() {
            return apiHandle.get() != null;
        }

        private boolean siblingIsReachable() {
            return siblingHandle.get() != null;
        }

        private boolean isAPIHandled(HollowAPI api) {
            return apiHandle.get() == api;
        }

        // Called when a new API appears; starts this handle's grace period if the new
        // API truly supersedes the one we track.
        private void newAPIAvailable(HollowAPI api) {
            if(shouldBeginGracePeriod(api)) {
                gracePeriodBeginTimestamp = System.currentTimeMillis();
            }
        }

        // The grace period begins only once: the new API must be a different instance
        // over genuinely different underlying data (direct or via proxies).
        private boolean shouldBeginGracePeriod(HollowAPI newAPI) {
            if(gracePeriodBeginTimestamp != Long.MAX_VALUE)
                return false;
            HollowAPI myAPI = apiHandle.get();
            if(myAPI == null)
                return false;
            if(myAPI == newAPI)
                return false;
            if(myAPI.getDataAccess() == newAPI.getDataAccess())
                return false;
            if(newAPI.getDataAccess() instanceof HollowProxyDataAccess && ((HollowProxyDataAccess)newAPI.getDataAccess()).getProxiedDataAccess() == myAPI.getDataAccess())
                return false;
            if(myAPI.getDataAccess() instanceof HollowProxyDataAccess && ((HollowProxyDataAccess)myAPI.getDataAccess()).getProxiedDataAccess() == newAPI.getDataAccess())
                return false;
            if(myAPI.getDataAccess() instanceof HollowProxyDataAccess
                    && newAPI.getDataAccess() instanceof HollowProxyDataAccess
                    && ((HollowProxyDataAccess)myAPI.getDataAccess()).getProxiedDataAccess() == ((HollowProxyDataAccess)newAPI.getDataAccess()).getProxiedDataAccess())
                return false;
            return true;
        }
    }
}
| 9,387 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/client/HollowClientUpdater.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.client;
import static com.netflix.hollow.core.HollowStateEngine.HEADER_TAG_SCHEMA_HASH;
import com.netflix.hollow.api.consumer.HollowConsumer;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.api.metrics.HollowConsumerMetrics;
import com.netflix.hollow.api.metrics.HollowMetricsCollector;
import com.netflix.hollow.core.HollowConstants;
import com.netflix.hollow.core.memory.MemoryMode;
import com.netflix.hollow.core.memory.pool.ArraySegmentRecycler;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.filter.HollowFilterConfig;
import com.netflix.hollow.core.read.filter.TypeFilter;
import com.netflix.hollow.core.schema.HollowSchemaHash;
import com.netflix.hollow.core.util.HollowObjectHashCodeFinder;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.logging.Logger;
/**
* A class comprising much of the internal state of a {@link HollowConsumer}. Not intended for external consumption.
*/
public class HollowClientUpdater {
private static final Logger LOG = Logger.getLogger(HollowClientUpdater.class.getName());
private volatile HollowDataHolder hollowDataHolderVolatile;
private final HollowUpdatePlanner planner;
private final CompletableFuture<Long> initialLoad;
private boolean forceDoubleSnapshot = false;
private final FailedTransitionTracker failedTransitionTracker;
private final StaleHollowReferenceDetector staleReferenceDetector;
private final CopyOnWriteArrayList<HollowConsumer.RefreshListener> refreshListeners;
private final HollowAPIFactory apiFactory;
private final HollowObjectHashCodeFinder hashCodeFinder;
private final MemoryMode memoryMode;
private final HollowConsumer.ObjectLongevityConfig objectLongevityConfig;
private final HollowConsumer.DoubleSnapshotConfig doubleSnapshotConfig;
private final HollowConsumerMetrics metrics;
private final HollowMetricsCollector<HollowConsumerMetrics> metricsCollector;
private boolean skipTypeShardUpdateWithNoAdditions;
private TypeFilter filter;
/**
 * @param transitionCreator       retrieves snapshot/delta blobs when planning updates
 * @param refreshListeners        listeners notified during refreshes (duplicates removed)
 * @param apiFactory              creates the generated/custom HollowAPI over the state engine
 * @param doubleSnapshotConfig    controls when double snapshots are permitted
 * @param hashCodeFinder          hash code strategy for the read state engine
 * @param memoryMode              on-heap vs shared-memory mode for loaded data
 * @param objectLongevityConfig   configures object longevity grace/detection periods
 * @param objectLongevityDetector receives stale-reference existence/usage signals
 * @param metrics                 consumer metrics updated after refreshes
 * @param metricsCollector        optional sink for the metrics (may be null)
 */
public HollowClientUpdater(HollowConsumer.BlobRetriever transitionCreator,
        List<HollowConsumer.RefreshListener> refreshListeners,
        HollowAPIFactory apiFactory,
        HollowConsumer.DoubleSnapshotConfig doubleSnapshotConfig,
        HollowObjectHashCodeFinder hashCodeFinder,
        MemoryMode memoryMode,
        HollowConsumer.ObjectLongevityConfig objectLongevityConfig,
        HollowConsumer.ObjectLongevityDetector objectLongevityDetector,
        HollowConsumerMetrics metrics,
        HollowMetricsCollector<HollowConsumerMetrics> metricsCollector) {
    this.planner = new HollowUpdatePlanner(transitionCreator, doubleSnapshotConfig);
    this.failedTransitionTracker = new FailedTransitionTracker();
    this.staleReferenceDetector = new StaleHollowReferenceDetector(objectLongevityConfig, objectLongevityDetector);
    // Create a copy of the listeners, removing any duplicates
    this.refreshListeners = new CopyOnWriteArrayList<>(
            refreshListeners.stream().distinct().toArray(HollowConsumer.RefreshListener[]::new));
    this.apiFactory = apiFactory;
    this.hashCodeFinder = hashCodeFinder;
    this.memoryMode = memoryMode;
    this.doubleSnapshotConfig = doubleSnapshotConfig;
    this.objectLongevityConfig = objectLongevityConfig;
    // Begin background housekeeping for stale-reference detection immediately.
    this.staleReferenceDetector.startMonitoring();
    this.metrics = metrics;
    this.metricsCollector = metricsCollector;
    this.initialLoad = new CompletableFuture<>();
}
/**
 * Enables/disables skipping of type-shard updates that contain no additions.
 * Also propagates the flag to the current state engine, if one is already loaded.
 */
public void setSkipShardUpdateWithNoAdditions(boolean skipTypeShardUpdateWithNoAdditions) {
    this.skipTypeShardUpdateWithNoAdditions = skipTypeShardUpdateWithNoAdditions;
    HollowDataHolder dataHolder = hollowDataHolderVolatile;
    if(dataHolder != null)
        dataHolder.getStateEngine().setSkipTypeShardUpdateWithNoAdditions(skipTypeShardUpdateWithNoAdditions);
}
/**
* Updates the client's state to the requested version, or to the version closest to but less than the requested version.
*
* @param requestedVersion the version to update the client to
* @return true if the update was either successfully completed and the updated version is the same as the requested version,
* or no updates were applied because the current version is the same as what the version would be after the updates were applied.
* false if the update completed but the client's updated version is not the same as the requested version, likely due to
* the requested version not being present in the data
* @throws IllegalArgumentException if no data could be retrieved for that version or any earlier versions
* @throws Throwable if any other exception occurred and the client could not be updated
*/
/*
* Note that this method is synchronized and it is the only method that modifies the
* {@code hollowDataHolderVolatile}, so we don't need to worry about it changing out from
* under us.
*/
/**
 * Convenience overload: wraps the bare version in a {@link HollowConsumer.VersionInfo}
 * (no announcement metadata) and delegates to {@link #updateTo(HollowConsumer.VersionInfo)}.
 */
public synchronized boolean updateTo(long requestedVersion) throws Throwable {
    return updateTo(new HollowConsumer.VersionInfo(requestedVersion));
}
/**
 * Plans and applies a refresh to the requested version (or the nearest earlier version).
 * Synchronized: this is the only method that replaces {@code hollowDataHolderVolatile}.
 * See the javadoc on {@link #updateTo(long)} for the return-value contract.
 */
public synchronized boolean updateTo(HollowConsumer.VersionInfo requestedVersionInfo) throws Throwable {
    long requestedVersion = requestedVersionInfo.getVersion();
    if (requestedVersion == getCurrentVersionId()) {
        if (requestedVersion == HollowConstants.VERSION_NONE && hollowDataHolderVolatile == null) {
            LOG.warning("No versions to update to, initializing to empty state");
            // attempting to refresh, but no available versions - initialize to empty state
            hollowDataHolderVolatile = newHollowDataHolder();
            forceDoubleSnapshotNextUpdate(); // intentionally ignore doubleSnapshotConfig
        }
        return true;
    }
    // Take a snapshot of the listeners to ensure additions or removals may occur concurrently
    // but will not take effect until a subsequent refresh
    final HollowConsumer.RefreshListener[] localListeners =
            refreshListeners.toArray(new HollowConsumer.RefreshListener[0]);
    for (HollowConsumer.RefreshListener listener : localListeners) {
        listener.versionDetected(requestedVersionInfo);
    }
    long beforeVersion = getCurrentVersionId();
    for (HollowConsumer.RefreshListener listener : localListeners) {
        listener.refreshStarted(beforeVersion, requestedVersion);
    }
    try {
        // Either a full (snapshot-based) plan or an incremental (delta-only) plan.
        HollowUpdatePlan updatePlan = shouldCreateSnapshotPlan(requestedVersionInfo)
                ? planner.planInitializingUpdate(requestedVersion)
                : planner.planUpdate(hollowDataHolderVolatile.getCurrentVersion(), requestedVersion,
                        doubleSnapshotConfig.allowDoubleSnapshot());
        for (HollowConsumer.RefreshListener listener : localListeners)
            if (listener instanceof HollowConsumer.TransitionAwareRefreshListener)
                ((HollowConsumer.TransitionAwareRefreshListener)listener).transitionsPlanned(beforeVersion, requestedVersion, updatePlan.isSnapshotPlan(), updatePlan.getTransitionSequence());
        // A plan ending at VERSION_NONE means no usable blobs could be retrieved.
        if (updatePlan.destinationVersion() == HollowConstants.VERSION_NONE
                && requestedVersion != HollowConstants.VERSION_LATEST) {
            String msg = String.format("Could not create an update plan for version %s, because "
                    + "that version or any qualifying previous versions could not be retrieved.", requestedVersion);
            if (beforeVersion != HollowConstants.VERSION_NONE) {
                msg += String.format(" Consumer will remain at current version %s until next update attempt.", beforeVersion);
            }
            throw new IllegalArgumentException(msg);
        }
        if (updatePlan.equals(HollowUpdatePlan.DO_NOTHING)
                && requestedVersion == HollowConstants.VERSION_LATEST)
            throw new IllegalArgumentException("Could not create an update plan, because no existing versions could be retrieved.");
        if (updatePlan.destinationVersion(requestedVersion) == getCurrentVersionId())
            return true;
        if (updatePlan.isSnapshotPlan()) { // 1 snapshot and 0+ delta transitions
            HollowDataHolder oldDh = hollowDataHolderVolatile;
            if (oldDh == null || doubleSnapshotConfig.allowDoubleSnapshot()) {
                HollowDataHolder newDh = newHollowDataHolder();
                try {
                    /* We need to assign the volatile field after API init since it may be
                     * accessed during the update plan application, for example via a refresh
                     * listener (such as a unique key indexer) that calls getAPI. If we do it after
                     * newDh.update(), refresh listeners will see the old API. If we do it
                     * before then we open ourselves up to a race where a caller will get back
                     * null if they call getAPI after assigning the volatile but before the API
                     * is initialized in HollowDataHolder#initializeAPI.
                     * Also note that hollowDataHolderVolatile only changes for snapshot plans,
                     * and it is only for snapshot plans that HollowDataHolder#initializeAPI is
                     * called. */
                    newDh.update(updatePlan, localListeners, () -> hollowDataHolderVolatile = newDh);
                } catch (Throwable t) {
                    // If the update plan failed then revert back to the old holder
                    hollowDataHolderVolatile = oldDh;
                    throw t;
                }
                forceDoubleSnapshot = false;
            }
        } else { // 0 snapshot and 1+ delta transitions
            hollowDataHolderVolatile.update(updatePlan, localListeners, () -> {});
        }
        for(HollowConsumer.RefreshListener refreshListener : localListeners)
            refreshListener.refreshSuccessful(beforeVersion, getCurrentVersionId(), requestedVersion);
        metrics.updateTypeStateMetrics(getStateEngine(), requestedVersion);
        if(metricsCollector != null)
            metricsCollector.collect(metrics);
        initialLoad.complete(getCurrentVersionId()); // only set the first time
        return getCurrentVersionId() == requestedVersion;
    } catch(Throwable th) {
        // Any failure forces a double snapshot on the next attempt and is reported
        // to metrics and listeners before being rethrown.
        forceDoubleSnapshotNextUpdate();
        metrics.updateRefreshFailed();
        if(metricsCollector != null)
            metricsCollector.collect(metrics);
        for(HollowConsumer.RefreshListener refreshListener : localListeners)
            refreshListener.refreshFailed(beforeVersion, getCurrentVersionId(), requestedVersion, th);
        // intentionally omitting a call to initialLoad.completeExceptionally(th), for producers
        // that write often a consumer has a chance to try another snapshot that might succeed
        throw th;
    }
}
/**
 * Registers a refresh listener. Listeners are deduplicated: adding an already-registered
 * listener is a no-op (the constructor likewise removes duplicates via distinct()).
 * For {@link HollowConsumer.RefreshRegistrationListener}s, {@code onBeforeAddition} is
 * invoked only when the listener is actually being added for the first time; if that
 * callback throws, the listener is not added.
 */
public synchronized void addRefreshListener(HollowConsumer.RefreshListener refreshListener,
        HollowConsumer c) {
    if (refreshListener instanceof HollowConsumer.RefreshRegistrationListener) {
        if (!refreshListeners.contains(refreshListener)) {
            ((HollowConsumer.RefreshRegistrationListener) refreshListener).onBeforeAddition(c);
            // Fix: use addIfAbsent (not add) so a concurrent/repeated registration cannot
            // insert the same registration listener twice; the plain-listener branch below
            // and the constructor already enforce uniqueness.
            refreshListeners.addIfAbsent(refreshListener);
        }
    } else {
        refreshListeners.addIfAbsent(refreshListener);
    }
}
/**
 * Unregisters a refresh listener. If the listener was present and is a
 * {@link HollowConsumer.RefreshRegistrationListener}, its {@code onAfterRemoval}
 * callback is invoked; removing an unknown listener is a no-op.
 */
public synchronized void removeRefreshListener(HollowConsumer.RefreshListener refreshListener,
        HollowConsumer c) {
    boolean wasRegistered = refreshListeners.remove(refreshListener);
    if (!wasRegistered) {
        return;
    }
    if (refreshListener instanceof HollowConsumer.RefreshRegistrationListener) {
        ((HollowConsumer.RefreshRegistrationListener) refreshListener).onAfterRemoval(c);
    }
}
/**
 * Returns the currently loaded data version, or {@link HollowConstants#VERSION_NONE}
 * if no data has been loaded yet. Reads the volatile holder once for a consistent view.
 */
public long getCurrentVersionId() {
    HollowDataHolder holder = hollowDataHolderVolatile;
    if (holder == null) {
        return HollowConstants.VERSION_NONE;
    }
    return holder.getCurrentVersion();
}
/**
 * Requests that the next refresh use a full (double) snapshot plan rather than deltas.
 * The flag is cleared after a snapshot plan is applied successfully.
 */
public void forceDoubleSnapshotNextUpdate() {
    this.forceDoubleSnapshot = true;
}
/**
* Whether or not a snapshot plan should be created. Visible for testing.
*/
/**
 * Whether or not a snapshot plan should be created. Visible for testing.
 *
 * Returns true when (a) no data is loaded yet, (b) a double snapshot was explicitly
 * requested and is permitted, or (c) double-snapshot-on-schema-change is enabled and
 * the incoming version's schema hash (from announcement metadata) differs from the
 * current state engine's schema hash.
 */
boolean shouldCreateSnapshotPlan(HollowConsumer.VersionInfo incomingVersionInfo) {
    if (getCurrentVersionId() == HollowConstants.VERSION_NONE
            || (forceDoubleSnapshot && doubleSnapshotConfig.allowDoubleSnapshot())) {
        return true;
    }
    if (doubleSnapshotConfig.doubleSnapshotOnSchemaChange()) {
        // double snapshot on schema change relies on presence of a header tag in incoming version metadata
        if (incomingVersionInfo.getAnnouncementMetadata() == null
                || !incomingVersionInfo.getAnnouncementMetadata().isPresent()) {
            LOG.warning("Double snapshots on schema change are enabled and its functioning depends on " +
                    "visibility into incoming version's schema through metadata but NO metadata was available " +
                    "for version " + incomingVersionInfo.getVersion() + ". Check that the mechanism that triggered " +
                    "the refresh (usually announcementWatcher) supports passing version metadata. This refresh will " +
                    "not be able to reflect any schema changes.");
            return false;
        }
        Map<String, String> metadata = incomingVersionInfo.getAnnouncementMetadata().get();
        // Look the hash up once and reuse it for both the presence check and the comparison.
        String incoming = metadata.get(HEADER_TAG_SCHEMA_HASH);
        if (incoming == null) {
            LOG.warning("Double snapshots on schema change are enabled but version metadata for incoming " +
                    "version " + incomingVersionInfo.getVersion() + " did not contain the required attribute (" +
                    HEADER_TAG_SCHEMA_HASH + "). Check that the producer supports setting this attribute. This " +
                    "refresh will not be able to reflect any schema changes.");
            return false;
        }
        if (!doubleSnapshotConfig.allowDoubleSnapshot()) {
            LOG.warning("Auto double snapshots on schema changes are enabled but double snapshots on consumer " +
                    "are prohibited by doubleSnapshotConfig. This refresh will not be able to reflect any schema changes.");
            return false;
        }
        String current = new HollowSchemaHash(getStateEngine().getSchemas()).getHash();
        if (!current.equals(incoming)) {
            return true;
        }
    }
    return false;
}
private HollowDataHolder newHollowDataHolder() {
return new HollowDataHolder(newStateEngine(), apiFactory, memoryMode,
doubleSnapshotConfig, failedTransitionTracker,
staleReferenceDetector, objectLongevityConfig)
.setFilter(filter)
.setSkipTypeShardUpdateWithNoAdditions(skipTypeShardUpdateWithNoAdditions);
}
private HollowReadStateEngine newStateEngine() {
HollowDataHolder hollowDataHolderLocal = hollowDataHolderVolatile;
if (hollowDataHolderLocal != null) {
ArraySegmentRecycler existingRecycler =
hollowDataHolderLocal.getStateEngine().getMemoryRecycler();
return new HollowReadStateEngine(hashCodeFinder, true, existingRecycler);
}
return new HollowReadStateEngine(hashCodeFinder);
}
    /**
     * @return the stack trace recorder maintained by the stale reference detector, holding traces
     *         captured at usages of stale object references.
     */
    public StackTraceRecorder getStaleReferenceUsageStackTraceRecorder() {
        return staleReferenceDetector.getStaleReferenceStackTraceRecorder();
    }
public HollowReadStateEngine getStateEngine() {
HollowDataHolder hollowDataHolderLocal = hollowDataHolderVolatile;
return hollowDataHolderLocal == null ? null : hollowDataHolderLocal.getStateEngine();
}
public HollowAPI getAPI() {
HollowDataHolder hollowDataHolderLocal = hollowDataHolderVolatile;
return hollowDataHolderLocal == null ? null : hollowDataHolderLocal.getAPI();
}
    /**
     * Sets the filter restricting which types are loaded; the filter is consulted when a
     * snapshot is read.
     */
    public void setFilter(HollowFilterConfig filter) {
        this.filter = filter;
    }
    /**
     * Sets the {@link TypeFilter} restricting which types are loaded; the filter is consulted
     * when a snapshot is read.
     */
    public void setFilter(TypeFilter filter) {
        this.filter = filter;
    }
    /**
     * @return the number of failed snapshot transitions stored in the {@link FailedTransitionTracker}.
     *         Failed transitions remain recorded until {@link #clearFailedTransitions()} is called.
     */
    public int getNumFailedSnapshotTransitions() {
        return failedTransitionTracker.getNumFailedSnapshotTransitions();
    }
    /**
     * @return the number of failed delta transitions stored in the {@link FailedTransitionTracker}.
     *         Failed transitions remain recorded until {@link #clearFailedTransitions()} is called.
     */
    public int getNumFailedDeltaTransitions() {
        return failedTransitionTracker.getNumFailedDeltaTransitions();
    }
    /**
     * Clear any failed transitions from the {@link FailedTransitionTracker}, so that they may be reattempted when an update is triggered.
     *
     * @see #getNumFailedSnapshotTransitions()
     * @see #getNumFailedDeltaTransitions()
     */
    public void clearFailedTransitions() {
        this.failedTransitionTracker.clear();
    }
    /**
     * @return a future that will be completed with the version of data loaded when the initial load of data
     * has completed.
     * <p>
     * The future is intentionally not completed exceptionally when a refresh fails: a later
     * refresh attempt (e.g. against a newer snapshot) may still complete it successfully.
     */
    public CompletableFuture<Long> getInitialLoad() {
        return this.initialLoad;
    }
}
| 9,388 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/client/HollowClientConsumerBridge.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.client;
import com.netflix.hollow.api.consumer.HollowConsumer;
import com.netflix.hollow.api.consumer.HollowConsumer.Blob;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
@SuppressWarnings("deprecation")
class HollowClientConsumerBridge {

    /**
     * Adapts a legacy {@link HollowBlobRetriever} to the {@link HollowConsumer.BlobRetriever}
     * interface. Header blobs are not supported by the legacy interface.
     */
    static HollowConsumer.BlobRetriever consumerBlobRetrieverFor(final HollowBlobRetriever blobRetriever) {
        return new HollowConsumer.BlobRetriever() {
            @Override
            public HollowConsumer.HeaderBlob retrieveHeaderBlob(long currentVersion) {
                throw new UnsupportedOperationException();
            }

            @Override
            public Blob retrieveSnapshotBlob(long desiredVersion) {
                return consumerBlobFor(blobRetriever.retrieveSnapshotBlob(desiredVersion));
            }

            @Override
            public Blob retrieveDeltaBlob(long currentVersion) {
                return consumerBlobFor(blobRetriever.retrieveDeltaBlob(currentVersion));
            }

            @Override
            public Blob retrieveReverseDeltaBlob(long currentVersion) {
                return consumerBlobFor(blobRetriever.retrieveReverseDeltaBlob(currentVersion));
            }
        };
    }

    /**
     * Wraps a legacy {@link HollowBlob} as a consumer {@link HollowConsumer.Blob}, delegating
     * stream and file access; returns null when the legacy retriever produced no blob.
     * (Extracted helper: this wrapping was previously duplicated verbatim for the
     * snapshot, delta and reverse-delta cases.)
     */
    private static HollowConsumer.Blob consumerBlobFor(final HollowBlob blob) {
        if(blob == null)
            return null;

        return new HollowConsumer.Blob(blob.getFromVersion(), blob.getToVersion()) {
            @Override
            public InputStream getInputStream() throws IOException {
                return blob.getInputStream();
            }

            @Override
            public File getFile() throws IOException {
                return blob.getFile();
            }
        };
    }

    /**
     * Adapts a legacy {@link HollowUpdateListener} to the {@link HollowConsumer.RefreshListener}
     * interface, forwarding each refresh lifecycle event to the corresponding legacy callback.
     */
    static HollowConsumer.RefreshListener consumerRefreshListenerFor(final HollowUpdateListener listener) {
        return new HollowConsumer.AbstractRefreshListener() {
            @Override
            public void refreshStarted(long currentVersion, long requestedVersion) {
                listener.refreshStarted(currentVersion, requestedVersion);
            }

            @Override
            public void snapshotUpdateOccurred(HollowAPI api, HollowReadStateEngine stateEngine, long version) throws Exception {
                listener.dataInitialized(api, stateEngine, version);
            }

            @Override
            public void blobLoaded(final HollowConsumer.Blob transition) {
                // Wrap the consumer Blob back into the legacy HollowBlob type for the legacy listener.
                listener.transitionApplied(new HollowBlob(transition.getFromVersion(), transition.getToVersion()) {
                    @Override
                    public InputStream getInputStream() throws IOException {
                        return transition.getInputStream();
                    }

                    @Override
                    public File getFile() throws IOException {
                        return transition.getFile();
                    }
                });
            }

            @Override
            public void deltaUpdateOccurred(HollowAPI api, HollowReadStateEngine stateEngine, long version) throws Exception {
                listener.dataUpdated(api, stateEngine, version);
            }

            @Override
            public void refreshSuccessful(long beforeVersion, long afterVersion, long requestedVersion) {
                listener.refreshCompleted(beforeVersion, afterVersion, requestedVersion);
            }

            @Override
            public void refreshFailed(long beforeVersion, long afterVersion, long requestedVersion, Throwable failureCause) {
                listener.refreshFailed(beforeVersion, afterVersion, requestedVersion, failureCause);
            }
        };
    }

    static HollowClientDoubleSnapshotConfig doubleSnapshotConfigFor(HollowClientMemoryConfig memoryConfig) {
        return new HollowClientDoubleSnapshotConfig(memoryConfig);
    }

    /**
     * A {@link HollowConsumer.DoubleSnapshotConfig} backed by a legacy
     * {@link HollowClientMemoryConfig}; the max-deltas threshold defaults to 32.
     */
    static class HollowClientDoubleSnapshotConfig implements HollowConsumer.DoubleSnapshotConfig {
        private final HollowClientMemoryConfig clientMemCfg;
        private int maxDeltasBeforeDoubleSnapshot = 32;

        private HollowClientDoubleSnapshotConfig(HollowClientMemoryConfig clientMemCfg) {
            this.clientMemCfg = clientMemCfg;
        }

        @Override
        public boolean allowDoubleSnapshot() {
            return clientMemCfg.allowDoubleSnapshot();
        }

        @Override
        public int maxDeltasBeforeDoubleSnapshot() {
            return maxDeltasBeforeDoubleSnapshot;
        }

        public void setMaxDeltasBeforeDoubleSnapshot(int maxDeltas) {
            this.maxDeltasBeforeDoubleSnapshot = maxDeltas;
        }
    }
}
| 9,389 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/client/HollowDataHolder.java | /*
* Copyright 2016-2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.client;
import com.netflix.hollow.api.consumer.HollowConsumer;
import com.netflix.hollow.api.consumer.HollowConsumer.TransitionAwareRefreshListener;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.core.HollowConstants;
import com.netflix.hollow.core.memory.MemoryMode;
import com.netflix.hollow.core.read.HollowBlobInput;
import com.netflix.hollow.core.read.OptionalBlobPartInput;
import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;
import com.netflix.hollow.core.read.dataaccess.proxy.HollowProxyDataAccess;
import com.netflix.hollow.core.read.engine.HollowBlobReader;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.filter.HollowFilterConfig;
import com.netflix.hollow.core.read.filter.TypeFilter;
import com.netflix.hollow.tools.history.HollowHistoricalStateCreator;
import com.netflix.hollow.tools.history.HollowHistoricalStateDataAccess;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* A class comprising much of the internal state of a {@link HollowConsumer}. Not intended for external consumption.
*/
class HollowDataHolder {
    private static final Logger LOG = Logger.getLogger(HollowDataHolder.class.getName());

    private final HollowReadStateEngine stateEngine;
    private final HollowAPIFactory apiFactory;
    private final MemoryMode memoryMode;
    private final HollowBlobReader reader;
    private final HollowConsumer.DoubleSnapshotConfig doubleSnapshotConfig;
    private final FailedTransitionTracker failedTransitionTracker;
    private final StaleHollowReferenceDetector staleReferenceDetector;
    private final HollowConsumer.ObjectLongevityConfig objLongevityConfig;

    // Optional type filter consulted when a snapshot is read; null loads all types.
    private TypeFilter filter;

    // API over the current state engine; replaced as transitions are applied.
    private HollowAPI currentAPI;

    // Weakly-held most recent historical state, used to chain prior states together
    // when object longevity is enabled.
    private WeakReference<HollowHistoricalStateDataAccess> priorHistoricalDataAccess;

    // Version of the currently loaded state; VERSION_NONE until a transition is applied.
    private long currentVersion = HollowConstants.VERSION_NONE;

    HollowDataHolder(HollowReadStateEngine stateEngine,
                     HollowAPIFactory apiFactory,
                     MemoryMode memoryMode,
                     HollowConsumer.DoubleSnapshotConfig doubleSnapshotConfig,
                     FailedTransitionTracker failedTransitionTracker,
                     StaleHollowReferenceDetector staleReferenceDetector,
                     HollowConsumer.ObjectLongevityConfig objLongevityConfig) {
        this.stateEngine = stateEngine;
        this.apiFactory = apiFactory;
        this.memoryMode = memoryMode;
        this.reader = new HollowBlobReader(stateEngine, memoryMode);
        this.doubleSnapshotConfig = doubleSnapshotConfig;
        this.failedTransitionTracker = failedTransitionTracker;
        this.staleReferenceDetector = staleReferenceDetector;
        this.objLongevityConfig = objLongevityConfig;
    }

    HollowReadStateEngine getStateEngine() {
        return stateEngine;
    }

    HollowAPI getAPI() {
        return currentAPI;
    }

    long getCurrentVersion() {
        return currentVersion;
    }

    HollowDataHolder setFilter(HollowFilterConfig filter) {
        /*
         * This method is preserved for binary compat from before TypeFilter was introduced.
         */
        return setFilter((TypeFilter)filter);
    }

    HollowDataHolder setFilter(TypeFilter filter) {
        this.filter = filter;
        return this;
    }

    HollowDataHolder setSkipTypeShardUpdateWithNoAdditions(boolean skipTypeShardUpdateWithNoAdditions) {
        this.stateEngine.setSkipTypeShardUpdateWithNoAdditions(skipTypeShardUpdateWithNoAdditions);
        return this;
    }

    /**
     * Applies the given update plan (snapshot plan or delta-only plan), notifying the supplied
     * refresh listeners as blobs are applied.
     *
     * @throws RuntimeException if double snapshots are allowed and the plan contains a
     *         transition that previously failed
     */
    void update(HollowUpdatePlan updatePlan, HollowConsumer.RefreshListener[] refreshListeners,
            Runnable apiInitCallback) throws Throwable {
        // Only fail if double snapshot is configured.
        // This is a short term solution until it is decided to either remove this feature
        // or refine it.
        // If the consumer is configured to only follow deltas (no double snapshot) then
        // any failure to transition will cause the consumer to become "stuck" on stale data
        // unless the failed transitions are explicitly cleared or a new consumer is created.
        // A transition failure is very broad encompassing many forms of transitory failure,
        // such as network failures when accessing a blob, where the consumer might recover,
        // such as when a new delta is published.
        // Note that a refresh listener may also induce a failed transition, likely unknowingly,
        // by throwing an exception.
        if (doubleSnapshotConfig.allowDoubleSnapshot() && failedTransitionTracker.anyTransitionWasFailed(updatePlan)) {
            throw new RuntimeException("Update plan contains known failing transition!");
        }

        if (updatePlan.isSnapshotPlan()) {
            applySnapshotPlan(updatePlan, refreshListeners, apiInitCallback);
        } else {
            applyDeltaOnlyPlan(updatePlan, refreshListeners);
        }
    }

    // Applies the snapshot transition followed by any catch-up deltas, then fires
    // snapshotUpdateOccurred. A listener failure marks the whole plan as failed.
    private void applySnapshotPlan(HollowUpdatePlan updatePlan,
            HollowConsumer.RefreshListener[] refreshListeners,
            Runnable apiInitCallback) throws Throwable {
        applySnapshotTransition(updatePlan.getSnapshotTransition(), refreshListeners, apiInitCallback);

        for(HollowConsumer.Blob blob : updatePlan.getDeltaTransitions()) {
            applyDeltaTransition(blob, true, refreshListeners);
        }

        try {
            for(HollowConsumer.RefreshListener refreshListener : refreshListeners)
                refreshListener.snapshotUpdateOccurred(currentAPI, stateEngine, updatePlan.destinationVersion());
        } catch(Throwable t) {
            failedTransitionTracker.markAllTransitionsAsFailed(updatePlan);
            throw t;
        }
    }

    // Loads a snapshot blob, (re)creates the API, and notifies transition-aware listeners.
    // A failure anywhere marks the snapshot transition as failed.
    private void applySnapshotTransition(HollowConsumer.Blob snapshotBlob,
            HollowConsumer.RefreshListener[] refreshListeners,
            Runnable apiInitCallback) throws Throwable {
        try (HollowBlobInput in = HollowBlobInput.modeBasedSelector(memoryMode, snapshotBlob);
             OptionalBlobPartInput optionalPartIn = snapshotBlob.getOptionalBlobPartInputs()) {
            applyStateEngineTransition(in, optionalPartIn, snapshotBlob, refreshListeners);
            initializeAPI(apiInitCallback);

            for (HollowConsumer.RefreshListener refreshListener : refreshListeners) {
                if (refreshListener instanceof TransitionAwareRefreshListener)
                    ((TransitionAwareRefreshListener)refreshListener).snapshotApplied(currentAPI, stateEngine, snapshotBlob.getToVersion());
            }
        } catch (Throwable t) {
            failedTransitionTracker.markFailedTransition(snapshotBlob);
            throw t;
        }
    }

    // Reads the blob into the state engine (snapshot or delta), records the new version,
    // and notifies listeners that the blob was loaded.
    private void applyStateEngineTransition(HollowBlobInput in, OptionalBlobPartInput optionalPartIn, HollowConsumer.Blob transition, HollowConsumer.RefreshListener[] refreshListeners) throws IOException {
        if(transition.isSnapshot()) {
            if(filter == null) {
                reader.readSnapshot(in, optionalPartIn);
            }
            else {
                reader.readSnapshot(in, optionalPartIn, filter);
            }
        } else {
            reader.applyDelta(in, optionalPartIn);
        }

        setVersion(transition.getToVersion());

        for(HollowConsumer.RefreshListener refreshListener : refreshListeners)
            refreshListener.blobLoaded(transition);
    }

    // Creates the API over the freshly-loaded state (proxied when object longevity is
    // enabled), registers it with the stale reference detector, and runs the caller's
    // init callback.
    private void initializeAPI(Runnable r) {
        if (objLongevityConfig.enableLongLivedObjectSupport()) {
            HollowProxyDataAccess dataAccess = new HollowProxyDataAccess();
            dataAccess.setDataAccess(stateEngine);
            currentAPI = apiFactory.createAPI(dataAccess);
        } else {
            currentAPI = apiFactory.createAPI(stateEngine);
        }
        staleReferenceDetector.newAPIHandle(currentAPI);

        try {
            r.run();
        } catch (Throwable t) {
            // Fix: log the throwable itself so its stack trace is recorded; previously only
            // t.toString() was appended to the message, losing the trace.
            LOG.log(Level.WARNING, "Failed to execute API init callback", t);
        }
    }

    private void applyDeltaOnlyPlan(HollowUpdatePlan updatePlan, HollowConsumer.RefreshListener[] refreshListeners) throws Throwable {
        for(HollowConsumer.Blob blob : updatePlan) {
            applyDeltaTransition(blob, false, refreshListeners);
        }
    }

    // Applies a single delta blob. In object-longevity mode the previous state is preserved
    // as a historical state and chained behind the new one. A failure anywhere marks the
    // delta transition as failed.
    private void applyDeltaTransition(HollowConsumer.Blob blob, boolean isSnapshotPlan, HollowConsumer.RefreshListener[] refreshListeners) throws Throwable {
        if (!memoryMode.equals(MemoryMode.ON_HEAP)) {
            LOG.warning("Skipping delta transition in shared-memory mode");
            return;
        }

        try (HollowBlobInput in = HollowBlobInput.modeBasedSelector(memoryMode, blob);
             OptionalBlobPartInput optionalPartIn = blob.getOptionalBlobPartInputs()) {
            applyStateEngineTransition(in, optionalPartIn, blob, refreshListeners);

            if(objLongevityConfig.enableLongLivedObjectSupport()) {
                HollowDataAccess previousDataAccess = currentAPI.getDataAccess();
                HollowHistoricalStateDataAccess priorState = new HollowHistoricalStateCreator(null).createBasedOnNewDelta(currentVersion, stateEngine);
                HollowProxyDataAccess newDataAccess = new HollowProxyDataAccess();
                newDataAccess.setDataAccess(stateEngine);
                currentAPI = apiFactory.createAPI(newDataAccess, currentAPI);

                // Re-point the previous API's proxy at the preserved historical state.
                if(previousDataAccess instanceof HollowProxyDataAccess)
                    ((HollowProxyDataAccess)previousDataAccess).setDataAccess(priorState);

                wireHistoricalStateChain(priorState);
            } else {
                if(currentAPI.getDataAccess() != stateEngine)
                    currentAPI = apiFactory.createAPI(stateEngine);

                priorHistoricalDataAccess = null;
            }

            if(!staleReferenceDetector.isKnownAPIHandle(currentAPI))
                staleReferenceDetector.newAPIHandle(currentAPI);

            for(HollowConsumer.RefreshListener refreshListener : refreshListeners) {
                if(!isSnapshotPlan)
                    refreshListener.deltaUpdateOccurred(currentAPI, stateEngine, blob.getToVersion());
                if (refreshListener instanceof TransitionAwareRefreshListener)
                    ((TransitionAwareRefreshListener)refreshListener).deltaApplied(currentAPI, stateEngine, blob.getToVersion());
            }
        } catch(Throwable t) {
            failedTransitionTracker.markFailedTransition(blob);
            throw t;
        }
    }

    // Links the previous historical state (if still reachable) to the new one, then
    // remembers the new state weakly so the chain can be extended on the next delta.
    private void wireHistoricalStateChain(HollowHistoricalStateDataAccess nextPriorState) {
        if(priorHistoricalDataAccess != null) {
            HollowHistoricalStateDataAccess dataAccess = priorHistoricalDataAccess.get();
            if(dataAccess != null) {
                dataAccess.setNextState(nextPriorState);
            }
        }
        priorHistoricalDataAccess = new WeakReference<HollowHistoricalStateDataAccess>(nextPriorState);
    }

    private void setVersion(long version) {
        currentVersion = version;
    }
}
| 9,390 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/client/HollowBlob.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.client;
import com.netflix.hollow.core.HollowConstants;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
/**
* A HollowBlob, which is either a snapshot or a delta, defines three things:
*
* <dl>
* <dt>The "from" version</dt>
* <dd>The unique identifier of the state to which a delta transition should be applied. If
* this is a snapshot, then this value is HollowConstants.VERSION_NONE.</dd>
*
* <dt>The "to" version</dt>
* <dd>The unique identifier of the state at which a dataset will arrive after this blob is applied.</dd>
*
* <dt>The actual blob data</dt>
* <dd>Implementations will define how to retrieve the actual blob data for this specific blob from a data store as an InputStream.</dd>
* </dl>
*
* @deprecated Extend the {@link com.netflix.hollow.api.consumer.HollowConsumer.Blob} for use with the
* {@link com.netflix.hollow.api.consumer.HollowConsumer.BlobRetriever} instead.
*/
@Deprecated
public abstract class HollowBlob {

    private final long fromVersion;
    private final long toVersion;

    /**
     * Instantiate a snapshot arriving at the specified data state version.
     *
     * @param toVersion the version at which the dataset arrives after this blob is applied
     */
    public HollowBlob(long toVersion) {
        this(HollowConstants.VERSION_NONE, toVersion);
    }

    /**
     * Instantiate a delta transitioning between two data state versions.
     *
     * @param fromVersion the version this blob must be applied on top of
     * @param toVersion the version at which the dataset arrives after this blob is applied
     */
    public HollowBlob(long fromVersion, long toVersion) {
        this.fromVersion = fromVersion;
        this.toVersion = toVersion;
    }

    /**
     * Implementations define how the actual blob data for this transition is retrieved from
     * a data store.
     * <p>
     * The returned InputStream is expected to be read without interruption, so it is a good
     * idea to fetch the entire blob (e.g. to disk) from a remote store before returning it.
     *
     * @return the input stream over the blob data
     * @throws IOException if the stream cannot be obtained
     */
    public abstract InputStream getInputStream() throws IOException;

    /**
     * Optional file-based access to the blob data; unsupported unless overridden.
     *
     * @return a file containing the blob data
     * @throws IOException if the file cannot be obtained
     */
    public File getFile() throws IOException {
        throw new UnsupportedOperationException();
    }

    /** @return true when this blob is a snapshot (i.e. it has no "from" version). */
    public boolean isSnapshot() {
        return HollowConstants.VERSION_NONE == fromVersion;
    }

    /** @return true when this blob transitions backwards to an earlier version. */
    public boolean isReverseDelta() {
        return fromVersion > toVersion;
    }

    /** @return the version this blob applies on top of, or VERSION_NONE for a snapshot. */
    public long getFromVersion() {
        return fromVersion;
    }

    /** @return the version at which the dataset arrives after this blob is applied. */
    public long getToVersion() {
        return toVersion;
    }
}
| 9,391 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/client/HollowUpdatePlanner.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.client;
import com.netflix.hollow.api.consumer.HollowConsumer;
import com.netflix.hollow.core.HollowConstants;
/**
* The HollowUpdatePlanner defines the logic responsible for interacting with a {@link HollowBlobRetriever}
* to create a {@link HollowUpdatePlan}.
*/
public class HollowUpdatePlanner {
private final HollowConsumer.BlobRetriever transitionCreator;
private final HollowConsumer.DoubleSnapshotConfig doubleSnapshotConfig;
@Deprecated
public HollowUpdatePlanner(HollowBlobRetriever blobRetriever) {
this(HollowClientConsumerBridge.consumerBlobRetrieverFor(blobRetriever));
}
public HollowUpdatePlanner(HollowConsumer.BlobRetriever blobRetriever) {
this(blobRetriever, new HollowConsumer.DoubleSnapshotConfig() {
@Override
public int maxDeltasBeforeDoubleSnapshot() {
return 32;
}
@Override
public boolean allowDoubleSnapshot() {
return true;
}
});
}
public HollowUpdatePlanner(HollowConsumer.BlobRetriever transitionCreator, HollowConsumer.DoubleSnapshotConfig doubleSnapshotConfig) {
this.transitionCreator = transitionCreator;
this.doubleSnapshotConfig = doubleSnapshotConfig;
}
/**
* @return the sequence of steps necessary to initialize a hollow state engine to a given state.
* @param desiredVersion - The version to which the hollow state engine should be updated once the resultant steps are applied.
* @throws Exception if the plan cannot be initialized
*/
public HollowUpdatePlan planInitializingUpdate(long desiredVersion) throws Exception {
return planUpdate(HollowConstants.VERSION_NONE, desiredVersion, true);
}
/**
* @param currentVersion - The current version of the hollow state engine, or HollowConstants.VERSION_NONE if not yet initialized
* @param desiredVersion - The version to which the hollow state engine should be updated once the resultant steps are applied.
* @param allowSnapshot - Allow a snapshot plan to be created if the destination version is not reachable
* @return the sequence of steps necessary to bring a hollow state engine up to date.
* @throws Exception if the plan cannot be updated
*/
public HollowUpdatePlan planUpdate(long currentVersion, long desiredVersion, boolean allowSnapshot) throws Exception {
if(desiredVersion == currentVersion)
return HollowUpdatePlan.DO_NOTHING;
if (currentVersion == HollowConstants.VERSION_NONE)
return snapshotPlan(desiredVersion);
HollowUpdatePlan deltaPlan = deltaPlan(currentVersion, desiredVersion, doubleSnapshotConfig.maxDeltasBeforeDoubleSnapshot());
long deltaDestinationVersion = deltaPlan.destinationVersion(currentVersion);
if(deltaDestinationVersion != desiredVersion && allowSnapshot) {
HollowUpdatePlan snapshotPlan = snapshotPlan(desiredVersion);
long snapshotDestinationVersion = snapshotPlan.destinationVersion(currentVersion);
if(snapshotDestinationVersion == desiredVersion
|| ((deltaDestinationVersion > desiredVersion) && (snapshotDestinationVersion < desiredVersion))
|| ((snapshotDestinationVersion < desiredVersion) && (snapshotDestinationVersion > deltaDestinationVersion)))
return snapshotPlan;
}
return deltaPlan;
}
/**
* Returns an update plan that if executed will update the client to a version that is either equal to or as close to but
* less than the desired version as possible. This plan normally contains one snapshot transition and zero or more delta
* transitions but if no previous versions were found then an empty plan, {@code HollowUpdatePlan.DO_NOTHING}, is returned.
*
* @param desiredVersion The desired version to which the client wishes to update to, or update to as close to as possible but lesser than this version
* @return An update plan containing 1 snapshot transition and 0+ delta transitions if requested versions were found,
* or an empty plan, {@code HollowUpdatePlan.DO_NOTHING}, if no previous versions were found
*/
private HollowUpdatePlan snapshotPlan(long desiredVersion) {
HollowUpdatePlan plan = new HollowUpdatePlan();
long nearestPreviousSnapshotVersion = includeNearestSnapshot(plan, desiredVersion);
// The includeNearestSnapshot function returns a snapshot version that is less than or equal to the desired version
if(nearestPreviousSnapshotVersion > desiredVersion)
return HollowUpdatePlan.DO_NOTHING;
// If the nearest snapshot version is {@code HollowConstants.VERSION_LATEST} then no past snapshots were found, so
// skip the delta planning and the update plan does nothing
if(nearestPreviousSnapshotVersion == HollowConstants.VERSION_LATEST)
return HollowUpdatePlan.DO_NOTHING;
plan.appendPlan(deltaPlan(nearestPreviousSnapshotVersion, desiredVersion, Integer.MAX_VALUE));
return plan;
}
private HollowUpdatePlan deltaPlan(long currentVersion, long desiredVersion, int maxDeltas) {
HollowUpdatePlan plan = new HollowUpdatePlan();
if(currentVersion < desiredVersion) {
applyForwardDeltasToPlan(currentVersion, desiredVersion, plan, maxDeltas);
} else if(currentVersion > desiredVersion) {
applyReverseDeltasToPlan(currentVersion, desiredVersion, plan, maxDeltas);
}
return plan;
}
private long applyForwardDeltasToPlan(long currentVersion, long desiredVersion, HollowUpdatePlan plan, int maxDeltas) {
int transitionCounter = 0;
while(currentVersion < desiredVersion && transitionCounter < maxDeltas) {
currentVersion = includeNextDelta(plan, currentVersion, desiredVersion);
transitionCounter++;
}
return currentVersion;
};
private long applyReverseDeltasToPlan(long currentVersion, long desiredVersion, HollowUpdatePlan plan, int maxDeltas) {
long achievedVersion = currentVersion;
int transitionCounter = 0;
while (currentVersion > desiredVersion && transitionCounter < maxDeltas) {
currentVersion = includeNextReverseDelta(plan, currentVersion);
if (currentVersion != HollowConstants.VERSION_NONE)
achievedVersion = currentVersion;
transitionCounter++;
}
return achievedVersion;
}
/**
* Includes the next delta only if it will not take us *after* the desired version
*/
private long includeNextDelta(HollowUpdatePlan plan, long currentVersion, long desiredVersion) {
HollowConsumer.Blob transition = transitionCreator.retrieveDeltaBlob(currentVersion);
if(transition != null) {
if(transition.getToVersion() <= desiredVersion) {
plan.add(transition);
}
return transition.getToVersion();
}
return HollowConstants.VERSION_LATEST;
}
private long includeNextReverseDelta(HollowUpdatePlan plan, long currentVersion) {
HollowConsumer.Blob transition = transitionCreator.retrieveReverseDeltaBlob(currentVersion);
if(transition != null) {
plan.add(transition);
return transition.getToVersion();
}
return HollowConstants.VERSION_NONE;
}
private long includeNearestSnapshot(HollowUpdatePlan plan, long desiredVersion) {
HollowConsumer.Blob transition = transitionCreator.retrieveSnapshotBlob(desiredVersion);
if(transition != null) {
plan.add(transition);
return transition.getToVersion();
}
return HollowConstants.VERSION_LATEST;
}
}
| 9,392 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/client/HollowClient.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.client;
import static com.netflix.hollow.api.client.HollowAPIFactory.DEFAULT_FACTORY;
import static com.netflix.hollow.api.client.HollowClientConsumerBridge.consumerBlobRetrieverFor;
import static com.netflix.hollow.api.client.HollowClientConsumerBridge.consumerRefreshListenerFor;
import static com.netflix.hollow.api.client.HollowClientMemoryConfig.DEFAULT_CONFIG;
import static com.netflix.hollow.api.client.HollowUpdateListener.DEFAULT_LISTENER;
import com.netflix.hollow.api.client.HollowClientConsumerBridge.HollowClientDoubleSnapshotConfig;
import com.netflix.hollow.api.codegen.HollowAPIClassJavaGenerator;
import com.netflix.hollow.api.consumer.HollowConsumer;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.api.metrics.HollowConsumerMetrics;
import com.netflix.hollow.core.memory.MemoryMode;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
import com.netflix.hollow.core.read.filter.HollowFilterConfig;
import com.netflix.hollow.core.util.DefaultHashCodeFinder;
import com.netflix.hollow.core.util.HollowObjectHashCodeFinder;
import java.util.Collections;
/**
* A HollowClient is the top-level class used by consumers of HollowData to initialize and keep up-to-date a local in-memory
* copy of a hollow dataset. The interactions between the "blob" transition store and announcement listener are defined by
* this class, and the implementations of the data retrieval, announcement mechanism are abstracted in the interfaces which
* are injectable to this class.
*
* The following is injectable:
*
* <dl>
* <dt>{@link HollowBlobRetriever}</dt>
* <dd>Implementations of this class define how to retrieve blob data for consumption by this HollowClient.</dd>
*
* <dt>{@link HollowAnnouncementWatcher}</dt>
* <dd>Implementations of this class define the announcement mechanism, which is used to track the version of the
* currently announced state. It's also suggested that implementations will trigger a refresh when the current
* data version is updated.</dd>
*
* <dt>{@link HollowUpdateListener}</dt>
* <dd>Implementations of this class will define what to do when various events happen before, during, and after updating
* local in-memory copies of hollow data sets.</dd>
*
* <dt>{@link HollowAPIFactory}</dt>
* <dd>Defines how to create a {@link HollowAPI} for the dataset, useful when wrapping a dataset with an api which has
* been generated (via the {@link HollowAPIClassJavaGenerator})</dd>
*
* <dt>{@link HollowClientMemoryConfig}</dt>
* <dd>Defines various aspects of data access guarantees and update behavior which impact the heap footprint/GC behavior
* of hollow.</dd>
*
* </dl>
*
* Only an implementation of the HollowBlobRetriever is required to be injected, the other components may use default
* implementations.
*
* @deprecated Use the {@link HollowConsumer} API instead.
*
*/
@Deprecated
public class HollowClient {

    // Tracks the latest announced data version and notifies this client when it changes.
    protected final HollowAnnouncementWatcher announcementWatcher;

    // Performs the actual work of retrieving blobs and updating the in-memory dataset.
    protected final HollowClientUpdater updater;

    // Governs when a double snapshot (full reload) is used instead of following the
    // delta chain; derived from the injected memory config in the main constructor.
    private final HollowClientDoubleSnapshotConfig doubleSnapshotConfig;

    /**
     * Creates a client using default implementations of every injectable component
     * except the required {@link HollowBlobRetriever}.
     *
     * @param blobRetriever defines how blob data is retrieved from the data store
     */
    public HollowClient(HollowBlobRetriever blobRetriever) {
        this(blobRetriever, new HollowAnnouncementWatcher.DefaultWatcher(), DEFAULT_LISTENER, DEFAULT_FACTORY, new DefaultHashCodeFinder(), DEFAULT_CONFIG);
    }

    /**
     * Creates a client with a default {@link HollowObjectHashCodeFinder}.
     *
     * @param blobRetriever defines how blob data is retrieved
     * @param announcementWatcher tracks the latest announced data version
     * @param updateListener receives callbacks before, during, and after updates
     * @param apiFactory creates the {@link HollowAPI} wrapping the dataset
     * @param memoryConfig data access guarantees and update behavior configuration
     */
    public HollowClient(HollowBlobRetriever blobRetriever,
                        HollowAnnouncementWatcher announcementWatcher,
                        HollowUpdateListener updateListener,
                        HollowAPIFactory apiFactory,
                        HollowClientMemoryConfig memoryConfig) {
        this(blobRetriever, announcementWatcher, updateListener, apiFactory, new DefaultHashCodeFinder(), memoryConfig);
    }

    /**
     * Fully-injectable constructor. Adapts the deprecated client-side abstractions
     * onto their {@link com.netflix.hollow.api.consumer.HollowConsumer} equivalents
     * via {@link HollowClientConsumerBridge}.
     *
     * @param blobRetriever defines how blob data is retrieved
     * @param announcementWatcher tracks the latest announced data version
     * @param updateListener receives callbacks before, during, and after updates
     * @param apiFactory creates the {@link HollowAPI} wrapping the dataset
     * @param hashCodeFinder defines hash codes used for object deduplication/lookup
     * @param memoryConfig data access guarantees and update behavior configuration
     */
    public HollowClient(HollowBlobRetriever blobRetriever,
                        HollowAnnouncementWatcher announcementWatcher,
                        HollowUpdateListener updateListener,
                        HollowAPIFactory apiFactory,
                        HollowObjectHashCodeFinder hashCodeFinder,
                        HollowClientMemoryConfig memoryConfig) {
        this.doubleSnapshotConfig = HollowClientConsumerBridge.doubleSnapshotConfigFor(memoryConfig);
        HollowConsumerMetrics hollowConsumerMetrics = new HollowConsumerMetrics();
        this.updater = new HollowClientUpdater(consumerBlobRetrieverFor(blobRetriever),
                Collections.singletonList(consumerRefreshListenerFor(updateListener)),
                apiFactory,
                doubleSnapshotConfig,
                hashCodeFinder,
                MemoryMode.ON_HEAP,
                memoryConfig,
                updateListener,
                hollowConsumerMetrics,
                null);
        this.announcementWatcher = announcementWatcher;
        // Registering this client also causes the watcher to subscribe to events
        // (see HollowAnnouncementWatcher.setClientToNotify).
        announcementWatcher.setClientToNotify(this);
    }

    /**
     * Triggers a refresh to the latest version specified by the HollowAnnouncementWatcher.
     * If already on the latest version, this operation is a no-op.
     *
     * This is a blocking call.
     */
    public void triggerRefresh() {
        try {
            updater.updateTo(announcementWatcher.getLatestVersionInfo());
        } catch(Throwable th) {
            // Any failure (including Errors) is surfaced to the caller as unchecked.
            throw new RuntimeException(th);
        }
    }

    /**
     * Triggers a refresh to the latest version specified by the HollowAnnouncementWatcher.
     * If already on the latest version, this operation is a no-op.
     *
     * This is an asynchronous call.
     */
    public void triggerAsyncRefresh() {
        announcementWatcher.triggerAsyncRefresh();
    }

    /**
     * If the HollowAnnouncementWatcher supports setting an explicit version, this method will update
     * to the specified version.
     *
     * Otherwise, an UnsupportedOperationException will be thrown.
     *
     * This is a blocking call.
     *
     * @param version the version to update to
     */
    public void triggerRefreshTo(long version) {
        announcementWatcher.setLatestVersion(version);
        triggerRefresh();
    }

    /**
     * Will force a double snapshot refresh on the next update.
     */
    public void forceDoubleSnapshotNextUpdate() {
        updater.forceDoubleSnapshotNextUpdate();
    }

    /**
     * Will apply the filter (i.e. not load the excluded types and fields) on the next snapshot update.
     * Subsequent updates will also ignore the types and fields.
     *
     * @param filter the filter configuration
     */
    public void setFilter(HollowFilterConfig filter) {
        updater.setFilter(filter);
    }

    /**
     * Set the maximum number of deltas which will be followed by this client. If an update
     * is triggered which attempts to traverse more than this number of double snapshots:
     *
     * <ul>
     * <li>Will do a double snapshot if enabled, otherwise</li>
     * <li>will traverse up to the specified number of deltas towards the desired state, then stop</li>
     * </ul>
     *
     * @param maxDeltas the maximum number of deltas
     */
    public void setMaxDeltas(int maxDeltas) {
        doubleSnapshotConfig.setMaxDeltasBeforeDoubleSnapshot(maxDeltas);
    }

    /**
     * Clear any failed transitions from the {@link FailedTransitionTracker}, so that they may be reattempted when an update is triggered.
     */
    public void clearFailedTransitions() {
        updater.clearFailedTransitions();
    }

    /**
     * @return the {@link StackTraceRecorder} which aggregates stack traces recorded
     *         when stale object references are used.
     */
    public StackTraceRecorder getStaleReferenceUsageStackTraceRecorder() {
        return updater.getStaleReferenceUsageStackTraceRecorder();
    }

    /**
     * @return the {@link HollowReadStateEngine} which is holding the underlying hollow dataset.
     */
    public HollowReadStateEngine getStateEngine() {
        return updater.getStateEngine();
    }

    /**
     * @return the api which wraps the underlying dataset.
     */
    public HollowAPI getAPI() {
        return updater.getAPI();
    }

    /**
     * @return the current version of the dataset. This is the unique identifier of the data's state.
     */
    public long getCurrentVersionId() {
        return updater.getCurrentVersionId();
    }

    /**
     * Fluent builder for {@link HollowClient}. A {@link HollowBlobRetriever} is required;
     * all other components default to their standard implementations.
     */
    public static class Builder {
        private HollowBlobRetriever blobRetriever = null;
        private HollowAnnouncementWatcher announcementWatcher = new HollowAnnouncementWatcher.DefaultWatcher();
        private HollowUpdateListener updateListener = DEFAULT_LISTENER;
        private HollowAPIFactory apiFactory = DEFAULT_FACTORY;
        private HollowClientMemoryConfig memoryConfig = DEFAULT_CONFIG;

        public HollowClient.Builder withBlobRetriever(HollowBlobRetriever blobRetriever) {
            this.blobRetriever = blobRetriever;
            return this;
        }

        public HollowClient.Builder withAnnouncementWatcher(HollowAnnouncementWatcher announcementWatcher) {
            this.announcementWatcher = announcementWatcher;
            return this;
        }

        public HollowClient.Builder withUpdateListener(HollowUpdateListener updateListener) {
            this.updateListener = updateListener;
            return this;
        }

        public HollowClient.Builder withAPIFactory(HollowAPIFactory apiFactory) {
            this.apiFactory = apiFactory;
            return this;
        }

        // Convenience: wraps the generated API class in a ForGeneratedAPI factory.
        public <T extends HollowAPI> HollowClient.Builder withGeneratedAPIClass(Class<T> generatedAPIClass) {
            this.apiFactory = new HollowAPIFactory.ForGeneratedAPI<T>(generatedAPIClass);
            return this;
        }

        public HollowClient.Builder withMemoryConfig(HollowClientMemoryConfig memoryConfig) {
            this.memoryConfig = memoryConfig;
            return this;
        }

        /**
         * @return the built client
         * @throws IllegalArgumentException if no HollowBlobRetriever was supplied
         */
        public HollowClient build() {
            if(blobRetriever == null)
                throw new IllegalArgumentException("A HollowBlobRetriever must be specified when building a HollowClient");

            return new HollowClient(blobRetriever, announcementWatcher, updateListener, apiFactory, memoryConfig);
        }
    }
}
| 9,393 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/client/HollowUpdateListener.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.client;
import com.netflix.hollow.api.consumer.HollowConsumer;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.core.read.engine.HollowReadStateEngine;
/**
* Implementations of this class will define what to do when various events happen before, during, and after updating
* local in-memory copies of hollow data sets.
*
* A default implementation, which does nothing, is available: {@link HollowUpdateListener#DEFAULT_LISTENER}.
*
* @deprecated Implement the {@link HollowConsumer.RefreshListener} and/or {@link HollowConsumer.ObjectLongevityDetector} for
* use with the {@link HollowConsumer} instead.
*
*/
@Deprecated
public interface HollowUpdateListener extends HollowConsumer.ObjectLongevityDetector {

    /**
     * Called whenever a state engine is initialized (either because the first snapshot was applied, or because a
     * broken delta chain was found and a double snapshot occurred).
     *
     * This method should be used to initialize any indexing which is critical to keep in-sync with the data.
     *
     * @param api the API
     * @param stateEngine the state engine
     * @param version the version
     * @throws Exception if there was an error processing this event
     */
    void dataInitialized(HollowAPI api, HollowReadStateEngine stateEngine, long version) throws Exception;

    /**
     * Called whenever a state engine's data is updated.
     *
     * This method should be used to update any indexing which is critical to keep in-sync with the data.
     *
     * @param api the API
     * @param stateEngine the state engine
     * @param version the version
     * @throws Exception if there was an error processing this event
     */
    void dataUpdated(HollowAPI api, HollowReadStateEngine stateEngine, long version) throws Exception;

    /**
     * Indicates that a refresh has begun. Generally useful for logging.
     *
     * @param currentVersion the current version
     * @param requestedVersion the requested version
     */
    void refreshStarted(long currentVersion, long requestedVersion);

    /**
     * Indicates that a refresh completed successfully.
     *
     * @param beforeVersion - The version when the refresh started
     * @param afterVersion - The version when the refresh completed
     * @param requestedVersion - The specific version which was requested
     */
    void refreshCompleted(long beforeVersion, long afterVersion, long requestedVersion);

    /**
     * Indicates that a refresh failed with an Exception.
     *
     * @param beforeVersion - The version when the refresh started
     * @param afterVersion - The version when the refresh completed
     * @param requestedVersion - The specific version which was requested
     * @param failureCause - The Exception which caused the failure.
     */
    void refreshFailed(long beforeVersion, long afterVersion, long requestedVersion, Throwable failureCause);

    /**
     * Called to indicate a transition was applied. Generally useful for logging or tracing of applied updates.
     *
     * @param transition The transition which was applied.
     */
    void transitionApplied(HollowBlob transition);

    /**
     * A listener implementation for which every callback is a no-op; useful as a
     * default when no update-lifecycle behavior is required.
     */
    HollowUpdateListener DEFAULT_LISTENER = new HollowUpdateListener() {
        @Override public void dataInitialized(HollowAPI api, HollowReadStateEngine stateEngine, long version) throws Exception { }
        @Override public void dataUpdated(HollowAPI api, HollowReadStateEngine stateEngine, long version) throws Exception { }
        @Override public void transitionApplied(HollowBlob transition) { }
        @Override public void refreshStarted(long currentVersion, long requestedVersion) { }
        @Override public void refreshCompleted(long beforeVersion, long afterVersion, long requestedVersion) { }
        @Override public void refreshFailed(long beforeVersion, long afterVersion, long requestedVersion, Throwable failureCause) { }
        @Override public void staleReferenceExistenceDetected(int count) { }
        @Override public void staleReferenceUsageDetected(int count) { }
    };
}
| 9,394 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/client/HollowAnnouncementWatcher.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.client;
import static com.netflix.hollow.core.util.Threads.daemonThread;

import com.netflix.hollow.api.consumer.HollowConsumer;
import com.netflix.hollow.core.HollowConstants;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadLocalRandom;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Implementations of this class are responsible for two things:
*
* 1) Tracking the latest announced data state version.
* 2) Keeping the client up to date by calling triggerAsyncRefresh() on self when the latest version changes.
*
* A default implementation {@link HollowAnnouncementWatcher.DefaultWatcher} is available. If this implementation
* is used, calling {@link HollowClient#triggerRefresh()} will always attempt to get to the latest state, unless an
* explicit state was specified via {@link HollowClient#triggerRefreshTo(long)}.
*
* @deprecated Implement the {@link HollowConsumer.AnnouncementWatcher} for use with the {@link HollowConsumer} instead.
*
*/
@Deprecated
public abstract class HollowAnnouncementWatcher {
    private static final Logger log = Logger.getLogger(HollowAnnouncementWatcher.class.getName());

    /** Executor on which asynchronous refreshes are run. */
    private final ExecutorService refreshExecutor;

    /** The client to refresh when a new version is available; set via {@link #setClientToNotify}. */
    private HollowClient client;

    /**
     * Construct a HollowAnnouncementWatcher with a default single-threaded daemon ExecutorService.
     */
    public HollowAnnouncementWatcher() {
        refreshExecutor = Executors.newFixedThreadPool(1,
                r -> daemonThread(r, getClass(), "watch"));
    }

    /**
     * Construct a HollowAnnouncementWatcher with the specified ExecutorService.
     *
     * @param refreshExecutor the ExecutorService to use for asynchronous state refresh.
     */
    public HollowAnnouncementWatcher(ExecutorService refreshExecutor) {
        this.refreshExecutor = refreshExecutor;
    }

    /**
     * @return the latest announced version info comprising announced version, metadata and pinned status of this version.
     */
    public HollowConsumer.VersionInfo getLatestVersionInfo() {
        return new HollowConsumer.VersionInfo(getLatestVersion());
    }

    /**
     * @return the latest announced version.
     */
    public abstract long getLatestVersion();

    /**
     * If some push announcement mechanism is to be provided by this HollowAnnouncementWatcher, subscribe here.
     * Alternatively, if some polling announcement mechanism is to be provided, set up the polling cycle here.
     *
     * When announcements are received, or polling reveals a new version, a call should be placed to triggerRefresh().
     */
    public abstract void subscribeToEvents();

    /**
     * Override this method ONLY if it is legal to explicitly update to a specific version.
     *
     * @param latestVersion the latest version
     */
    public void setLatestVersion(long latestVersion) {
        throw new UnsupportedOperationException("Cannot explicitly set latest version on a " + this.getClass());
    }

    /**
     * Will force a double snapshot refresh on the next update.
     */
    protected void forceDoubleSnapshotNextUpdate() {
        client.forceDoubleSnapshotNextUpdate();
    }

    /**
     * Triggers a refresh in a new thread immediately.
     */
    public void triggerAsyncRefresh() {
        triggerAsyncRefreshWithDelay(0);
    }

    /**
     * Triggers async refresh after some random number of milliseconds have passed,
     * between now and the specified maximum number of milliseconds.
     *
     * Any subsequent calls for async refresh will not begin until after the specified delay
     * has completed.
     *
     * @param maxDelayMillis the maximum delay in milliseconds
     */
    public void triggerAsyncRefreshWithRandomDelay(int maxDelayMillis) {
        // ThreadLocalRandom avoids allocating (and re-seeding) a new Random on every call.
        int delayMillis = maxDelayMillis > 0 ? ThreadLocalRandom.current().nextInt(maxDelayMillis) : 0;
        triggerAsyncRefreshWithDelay(delayMillis);
    }

    /**
     * Triggers async refresh after the specified number of milliseconds has passed.
     *
     * Any subsequent calls for async refresh will not begin until after the specified delay
     * has completed.
     *
     * @param delayMillis the delay in milliseconds
     */
    public void triggerAsyncRefreshWithDelay(int delayMillis) {
        final HollowClient client = this.client;
        final long targetBeginTime = System.currentTimeMillis() + delayMillis;

        refreshExecutor.execute(() -> {
            try {
                long delay = targetBeginTime - System.currentTimeMillis();
                if (delay > 0)
                    Thread.sleep(delay);
                client.triggerRefresh();
            } catch (InterruptedException ie) {
                // Restore the interrupt status so the executor thread sees it.
                Thread.currentThread().interrupt();
                log.log(Level.SEVERE, "Async refresh failed", ie);
            } catch (Throwable th) {
                log.log(Level.SEVERE, "Async refresh failed", th);
            }
        });
    }

    protected HollowClient getClientToNotify() {
        return client;
    }

    void setClientToNotify(HollowClient client) {
        this.client = client;
        subscribeToEvents();
    }

    /**
     * Default watcher: no push/poll events; the latest version may be set explicitly,
     * and otherwise defaults to {@link HollowConstants#VERSION_LATEST}.
     */
    public static class DefaultWatcher extends HollowAnnouncementWatcher {
        private long latestVersion = HollowConstants.VERSION_LATEST;

        public DefaultWatcher() {
            super();
        }

        public DefaultWatcher(ExecutorService refreshExecutor) {
            super(refreshExecutor);
        }

        @Override
        public long getLatestVersion() {
            /// by default, just try to fetch the maximum available version
            return latestVersion;
        }

        @Override
        public void subscribeToEvents() {
            // by default, update events not available.
        }

        @Override
        public void setLatestVersion(long latestVersion) {
            this.latestVersion = latestVersion;
        }
    }
}
| 9,395 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/client/StackTraceRecorder.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.client;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
public class StackTraceRecorder {

    /** Maximum number of frames recorded per individual stack trace. */
    private final int maxStackTraceElementsToRecord;

    /** Root frames of the aggregated call tree, keyed by the frame's string form. */
    private final ConcurrentHashMap<String, StackTraceNode> rootNodes;

    /**
     * @param maxStackTraceElementsToRecord the maximum stack depth to record per trace
     */
    public StackTraceRecorder(int maxStackTraceElementsToRecord) {
        this.maxStackTraceElementsToRecord = maxStackTraceElementsToRecord;
        this.rootNodes = new ConcurrentHashMap<>();
    }

    /**
     * Records the current thread's stack trace, starting at the immediate caller's frame.
     */
    public void recordStackTrace() {
        recordStackTrace(1);
    }

    /**
     * Records the current thread's stack trace into the aggregated call tree,
     * incrementing the observation count of each recorded frame.
     *
     * @param omitFirstNFrames the number of caller frames to skip before recording
     */
    public void recordStackTrace(int omitFirstNFrames) {
        ++omitFirstNFrames; // additionally skip this method's own frame

        StackTraceElement[] stackTrace = new Throwable().getStackTrace();

        if (stackTrace.length <= omitFirstNFrames)
            return;

        int maxFrameIndexToRecord = Math.min(stackTrace.length, maxStackTraceElementsToRecord + omitFirstNFrames);

        StackTraceNode node = getNode(stackTrace[omitFirstNFrames], rootNodes);
        node.increment();

        for (int i = omitFirstNFrames + 1; i < maxFrameIndexToRecord; i++) {
            node = node.getChild(stackTrace[i]);
            node.increment();
        }
    }

    public Map<String, StackTraceNode> getRootNodes() {
        return rootNodes;
    }

    /**
     * One node of the aggregated call tree: a stack frame, the number of times it was
     * observed at this position, and the frames observed directly beneath it.
     */
    public class StackTraceNode {
        private final String traceLine;
        private final AtomicInteger count;
        private final ConcurrentHashMap<String, StackTraceNode> children;

        public StackTraceNode(String traceLine) {
            this.traceLine = traceLine;
            this.count = new AtomicInteger(0);
            this.children = new ConcurrentHashMap<>(2);
        }

        public String getTraceLine() {
            return traceLine;
        }

        public int getCount() {
            return count.get();
        }

        public Map<String, StackTraceNode> getChildren() {
            return children;
        }

        public void increment() {
            count.incrementAndGet();
        }

        public StackTraceNode getChild(StackTraceElement element) {
            return getNode(element, children);
        }
    }

    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder();
        for (StackTraceNode root : rootNodes.values()) {
            append(builder, root, 0);
        }
        return builder.toString();
    }

    // Recursively renders a node and its children, one space of indent per level.
    private void append(StringBuilder builder, StackTraceNode node, int level) {
        for (int i = 0; i < level; i++)
            builder.append(" ");

        builder.append(node.getTraceLine()).append(" (").append(node.getCount()).append(")\n");

        for (StackTraceNode child : node.getChildren().values()) {
            append(builder, child, level + 1);
        }
    }

    // Atomic get-or-create: computeIfAbsent replaces the manual get/putIfAbsent sequence,
    // guaranteeing a single node per frame without the extra allocation on the hit path.
    private StackTraceNode getNode(StackTraceElement element, ConcurrentHashMap<String, StackTraceNode> nodes) {
        return nodes.computeIfAbsent(element.toString(), StackTraceNode::new);
    }
}
| 9,396 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/client/HollowAPIFactory.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.client;
import com.netflix.hollow.api.codegen.HollowAPIClassJavaGenerator;
import com.netflix.hollow.api.custom.HollowAPI;
import com.netflix.hollow.core.read.dataaccess.HollowDataAccess;
import java.lang.reflect.Constructor;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
* An interface which can be implemented and passed to a {@link HollowClient} to inject the {@link HollowAPI} creation behavior.
*
* This is used to cause the HollowClient to create a specific api which has been generated (via the {@link HollowAPIClassJavaGenerator})
* to conform to a specific data model.
*
* A default implementation, which will create a default {@link HollowAPI} is available at {@link HollowAPIFactory#DEFAULT_FACTORY}.
*
*/
public interface HollowAPIFactory {

    /**
     * Create a {@link HollowAPI} wrapping the given data access.
     *
     * @param dataAccess the data access to wrap
     * @return the created API
     */
    HollowAPI createAPI(HollowDataAccess dataAccess);

    /**
     * Create a {@link HollowAPI} wrapping the given data access, given the API
     * created for the previous cycle.
     *
     * @param dataAccess the data access to wrap
     * @param previousCycleAPI the API created for the previous cycle
     * @return the created API
     */
    HollowAPI createAPI(HollowDataAccess dataAccess, HollowAPI previousCycleAPI);

    /** A factory which creates plain (non-generated) {@link HollowAPI} instances. */
    HollowAPIFactory DEFAULT_FACTORY = new HollowAPIFactory() {
        @Override
        public HollowAPI createAPI(HollowDataAccess dataAccess) {
            return new HollowAPI(dataAccess);
        }

        @Override
        public HollowAPI createAPI(HollowDataAccess dataAccess, HollowAPI previousCycleAPI) {
            return createAPI(dataAccess);
        }
    };

    /**
     * A factory which reflectively instantiates a generated {@link HollowAPI} subclass.
     *
     * @param <T> the generated API type
     */
    class ForGeneratedAPI<T extends HollowAPI> implements HollowAPIFactory {

        private final Class<T> generatedAPIClass;
        // Type names forwarded to the generated API's constructor — presumably the
        // types whose hollow objects should be cached; confirm against the generated API.
        private final Set<String> cachedTypes;

        public ForGeneratedAPI(Class<T> generatedAPIClass) {
            this(generatedAPIClass, new String[0]);
        }

        public ForGeneratedAPI(Class<T> generatedAPIClass, String... cachedTypes) {
            this.generatedAPIClass = generatedAPIClass;
            this.cachedTypes = new HashSet<>(Arrays.asList(cachedTypes));
        }

        @Override
        public T createAPI(HollowDataAccess dataAccess) {
            try {
                // Prefer the (HollowDataAccess, Set) constructor, which accepts cached types.
                Constructor<T> constructor = generatedAPIClass.getConstructor(HollowDataAccess.class, Set.class);
                return constructor.newInstance(dataAccess, cachedTypes);
            } catch (Exception e) {
                try {
                    // Fall back to the single-argument constructor.
                    Constructor<T> constructor = generatedAPIClass.getConstructor(HollowDataAccess.class);
                    return constructor.newInstance(dataAccess);
                } catch (Exception e2) {
                    // Preserve the first failure as suppressed so neither cause is lost.
                    RuntimeException failure = new RuntimeException(e2);
                    failure.addSuppressed(e);
                    throw failure;
                }
            }
        }

        @Override
        public T createAPI(HollowDataAccess dataAccess, HollowAPI previousCycleAPI) {
            try {
                // Prefer the constructor which can carry over state from the previous cycle's API.
                Constructor<T> constructor = generatedAPIClass.getConstructor(HollowDataAccess.class, Set.class, Map.class, generatedAPIClass);
                return constructor.newInstance(dataAccess, cachedTypes, Collections.emptyMap(), previousCycleAPI);
            } catch (Exception e) {
                try {
                    // Fall back to the single-argument constructor.
                    Constructor<T> constructor = generatedAPIClass.getConstructor(HollowDataAccess.class);
                    return constructor.newInstance(dataAccess);
                } catch (Exception e2) {
                    // Preserve the first failure as suppressed so neither cause is lost.
                    RuntimeException failure = new RuntimeException(e2);
                    failure.addSuppressed(e);
                    throw failure;
                }
            }
        }
    }
}
| 9,397 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/client/HollowBlobRetriever.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.client;
import com.netflix.hollow.api.consumer.HollowConsumer;
import com.netflix.hollow.core.HollowBlobHeader;
/**
* An interface which defines the necessary interactions of a {@link HollowClient} with a blob data store.
*
* Implementations will define how to retrieve blob data from a data store.
*
* @deprecated Implement the {@link HollowConsumer.BlobRetriever} for use with the {@link HollowConsumer} instead.
*/
@Deprecated
public interface HollowBlobRetriever {
/**
* @param desiredVersion the desired version
* @return the snapshot for the state with an identifier equal to or less than the desired version
*/
HollowBlob retrieveSnapshotBlob(long desiredVersion);
/**
* @param currentVersion the current version
* @return a delta transition which can be applied to the currentVersion
*/
HollowBlob retrieveDeltaBlob(long currentVersion);
/**
* @param currentVersion the current version
* @return a reverse delta transition which can be applied to the currentVersion
*/
HollowBlob retrieveReverseDeltaBlob(long currentVersion);
/**
* @param currentVersion the desired version
* @return the header for the state with an identifier equal to currentVersion
*/
default HollowBlobHeader retrieveHeaderBlob(long currentVersion) {
return new HollowBlobHeader();
}
}
| 9,398 |
0 | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api | Create_ds/hollow/hollow/src/main/java/com/netflix/hollow/api/client/HollowUpdatePlan.java | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.client;
import static java.util.stream.Collectors.toList;
import com.netflix.hollow.api.consumer.HollowConsumer;
import com.netflix.hollow.core.HollowConstants;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
/**
* A plan, containing one or more {@link HollowConsumer.Blob}s, which will be used to update the current data state to a desired data state.
*/
public class HollowUpdatePlan implements Iterable<HollowConsumer.Blob> {

    // final: this is a shared constant; its backing list is Collections.emptyList(),
    // so it is also structurally immutable (add() on it would throw).
    public static final HollowUpdatePlan DO_NOTHING = new HollowUpdatePlan(Collections.<HollowConsumer.Blob>emptyList());

    private final List<HollowConsumer.Blob> transitions;

    private HollowUpdatePlan(List<HollowConsumer.Blob> transitions) {
        this.transitions = transitions;
    }

    public HollowUpdatePlan() {
        this.transitions = new ArrayList<>();
    }

    /**
     * @return true if this plan begins with a snapshot transition
     */
    public boolean isSnapshotPlan() {
        return !transitions.isEmpty() && transitions.get(0).isSnapshot();
    }

    /**
     * @return the leading snapshot transition, or null if this is not a snapshot plan
     */
    public HollowConsumer.Blob getSnapshotTransition() {
        if (!isSnapshotPlan())
            return null;
        return transitions.get(0);
    }

    /**
     * @return this plan's delta transitions: everything after the leading snapshot,
     *         or all transitions if the plan has no snapshot
     */
    public List<HollowConsumer.Blob> getDeltaTransitions() {
        if (!isSnapshotPlan())
            return transitions;
        return transitions.subList(1, transitions.size());
    }

    public HollowConsumer.Blob getTransition(int index) {
        return transitions.get(index);
    }

    public List<HollowConsumer.Blob> getTransitions() {
        return transitions;
    }

    /**
     * @return the blob types of this plan's transitions, in application order
     */
    public List<HollowConsumer.Blob.BlobType> getTransitionSequence() {
        return transitions.stream()
                .map(HollowConsumer.Blob::getBlobType)
                .collect(toList());
    }

    /**
     * @param currentVersion the version to report for an empty plan
     * @return the version this plan arrives at, or currentVersion if the plan is empty
     */
    public long destinationVersion(long currentVersion) {
        long dest = destinationVersion();
        return dest == HollowConstants.VERSION_NONE ? currentVersion : dest;
    }

    /**
     * @return the toVersion of the final transition, or
     *         {@link HollowConstants#VERSION_NONE} if the plan is empty
     */
    public long destinationVersion() {
        return transitions.isEmpty() ? HollowConstants.VERSION_NONE
                : transitions.get(transitions.size() - 1).getToVersion();
    }

    public int numTransitions() {
        return transitions.size();
    }

    public void add(HollowConsumer.Blob transition) {
        transitions.add(transition);
    }

    /**
     * Appends all of another plan's transitions to this plan.
     *
     * @param plan the plan whose transitions to append
     */
    public void appendPlan(HollowUpdatePlan plan) {
        transitions.addAll(plan.transitions);
    }

    @Override
    public Iterator<HollowConsumer.Blob> iterator() {
        return transitions.iterator();
    }
}
| 9,399 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.