repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
dustinstanley/katharsis-framework
katharsis-spring/src/test/java/io/katharsis/spring/domain/repository/ProjectRepository.java
837
package io.katharsis.spring.domain.repository; import io.katharsis.queryParams.QueryParams; import io.katharsis.repository.ResourceRepository; import io.katharsis.spring.domain.model.Project; import org.springframework.stereotype.Component; @Component public class ProjectRepository implements ResourceRepository<Project, Long> { @Override public <S extends Project> S save(S entity) { return null; } @Override public Project findOne(Long aLong, QueryParams requestParams) { return null; } @Override public Iterable<Project> findAll(QueryParams requestParams) { return null; } @Override public Iterable<Project> findAll(Iterable<Long> projectIds, QueryParams requestParams) { return null; } @Override public void delete(Long aLong) { } }
apache-2.0
gurhann/AutoTimeTable
optaplanner-core/src/test/java/org/optaplanner/core/impl/testdata/domain/multivar/TestdataMultiVarEntity.java
3001
/* * Copyright 2013 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.optaplanner.core.impl.testdata.domain.multivar; import org.optaplanner.core.api.domain.entity.PlanningEntity; import org.optaplanner.core.api.domain.variable.PlanningVariable; import org.optaplanner.core.impl.domain.entity.descriptor.EntityDescriptor; import org.optaplanner.core.impl.domain.solution.descriptor.SolutionDescriptor; import org.optaplanner.core.impl.testdata.domain.TestdataObject; import org.optaplanner.core.impl.testdata.domain.TestdataValue; @PlanningEntity public class TestdataMultiVarEntity extends TestdataObject { public static EntityDescriptor buildEntityDescriptor() { SolutionDescriptor solutionDescriptor = TestdataMultiVarSolution.buildSolutionDescriptor(); return solutionDescriptor.findEntityDescriptorOrFail(TestdataMultiVarEntity.class); } private TestdataValue primaryValue; private TestdataValue secondaryValue; private TestdataOtherValue nullableOtherValue; public TestdataMultiVarEntity() { } public TestdataMultiVarEntity(String code) { super(code); } public TestdataMultiVarEntity(String code, TestdataValue primaryValue, TestdataValue secondaryValue, TestdataOtherValue nullableOtherValue) { super(code); this.primaryValue = primaryValue; this.secondaryValue = secondaryValue; this.nullableOtherValue = nullableOtherValue; } @PlanningVariable(valueRangeProviderRefs = "valueRange") public TestdataValue getPrimaryValue() { return primaryValue; } public void 
setPrimaryValue(TestdataValue primaryValue) { this.primaryValue = primaryValue; } @PlanningVariable(valueRangeProviderRefs = "valueRange") public TestdataValue getSecondaryValue() { return secondaryValue; } public void setSecondaryValue(TestdataValue secondaryValue) { this.secondaryValue = secondaryValue; } @PlanningVariable(valueRangeProviderRefs = "otherValueRange") public TestdataOtherValue getNullableOtherValue() { return nullableOtherValue; } public void setNullableOtherValue(TestdataOtherValue nullableOtherValue) { this.nullableOtherValue = nullableOtherValue; } // ************************************************************************ // Complex methods // ************************************************************************ }
apache-2.0
google/binnavi
src/main/java/com/google/security/zynamics/reil/translators/x86/PushaTranslator.java
2362
// Copyright 2011-2016 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.security.zynamics.reil.translators.x86; import com.google.security.zynamics.reil.OperandSize; import com.google.security.zynamics.reil.ReilInstruction; import com.google.security.zynamics.reil.translators.IInstructionTranslator; import com.google.security.zynamics.reil.translators.ITranslationEnvironment; import com.google.security.zynamics.reil.translators.InternalTranslationException; import com.google.security.zynamics.reil.translators.TranslationHelpers; import com.google.security.zynamics.zylib.disassembly.IInstruction; import java.util.List; /** * Translates PUSHA instructions to REIL code. */ public class PushaTranslator implements IInstructionTranslator { /** * Translates a PUSHA instruction to REIL code. * * @param environment A valid translation environment. * @param instruction The PUSHA instruction to translate. 
* @param instructions The generated REIL code will be added to this list * * @throws InternalTranslationException if any of the arguments are null the passed instruction is * not an PUSHA instruction */ @Override public void translate(final ITranslationEnvironment environment, final IInstruction instruction, final List<ReilInstruction> instructions) throws InternalTranslationException { TranslationHelpers.checkTranslationArguments(environment, instruction, instructions, "pusha"); if (instruction.getOperands().size() != 0) { throw new InternalTranslationException( "Error: Argument instruction is not a pusha instruction (invalid number of operands)"); } final long baseOffset = instruction.getAddress().toLong() * 0x100; Helpers.generatePushAllRegisters(environment, baseOffset, OperandSize.DWORD, instructions); } }
apache-2.0
tommyettinger/SquidSetup
src/main/java/com/github/czyzby/kiwi/util/gdx/scene2d/Alignment.java
3537
package com.github.czyzby.kiwi.util.gdx.scene2d;

import com.badlogic.gdx.scenes.scene2d.ui.Cell;
import com.badlogic.gdx.utils.Align;
import com.badlogic.gdx.utils.IntMap;

/** LibGDX alignments are simple integers and it's rather easy to make a mistake while using the aligning methods. This
 * enums wraps all default alignments, allowing to validate if the alignment value is actually correct. No word
 * separators were used to make constants match (ignoring case) with {@link Align} variables (and for faster LML
 * parsing).
 *
 * @author MJ
 * @see Align */
public enum Alignment {
    /** {@link Align#center} */
    CENTER(Align.center),
    /** {@link Align#top} */
    TOP(Align.top),
    /** {@link Align#bottom} */
    BOTTOM(Align.bottom),
    /** {@link Align#left} */
    LEFT(Align.left),
    /** {@link Align#right} */
    RIGHT(Align.right),
    /** {@link Align#topLeft} */
    TOPLEFT(Align.topLeft),
    /** {@link Align#topRight} */
    TOPRIGHT(Align.topRight),
    /** {@link Align#bottomLeft} */
    BOTTOMLEFT(Align.bottomLeft),
    /** {@link Align#bottomRight} */
    BOTTOMRIGHT(Align.bottomRight);

    // Raw bit mask from the Align class that this constant wraps.
    private final int value;

    private Alignment(final int value) {
        this.value = value;
    }

    /** @return value from LibGDX {@link Align} class represented by this enum's constant. Convenience alias method for
     *         {@link #getAlignment()}. */
    public int get() {
        return value;
    }

    /** @return value from LibGDX {@link Align} class represented by this enum's constant. */
    public int getAlignment() {
        return value;
    }

    /** @param cell will have its alignment set. */
    public void apply(final Cell<?> cell) {
        cell.align(value);
    }

    /** @return true for TOP, TOPLEFT and TOPRIGHT. */
    public boolean isAlignedWithTop() {
        return (value & Align.top) != 0;
    }

    /** @return true for BOTTOM, BOTTOMLEFT and BOTTOMRIGHT. */
    public boolean isAlignedWithBottom() {
        return (value & Align.bottom) != 0;
    }

    /** @return true for LEFT, BOTTOMLEFT and TOPLEFT. */
    public boolean isAlignedWithLeft() {
        return (value & Align.left) != 0;
    }

    /** @return true for RIGHT, BOTTOMRIGHT and TOPRIGHT. */
    public boolean isAlignedWithRight() {
        return (value & Align.right) != 0;
    }

    /** @return true for CENTER. */
    public boolean isCentered() {
        return value == Align.center;
    }

    /** @param alignment value stored in {@link Align}.
     * @return Alignment enum constant with the same alignment value or null if alignment is invalid. */
    public static Alignment get(final int alignment) {
        return Constants.ALIGNMENTS.get(alignment);
    }

    /** @param alignment value that might be stored in {@link Align}.
     * @return true if the alignment matches an exact value of one of {@link Align} fields. */
    public static boolean isAlignmentValid(final int alignment) {
        return Constants.ALIGNMENTS.containsKey(alignment);
    }

    /** Utility class that allows to initiate static variables with enum's instances.
     *
     * @author MJ */
    private static final class Constants {
        // Reverse lookup: raw Align bit mask -> wrapping enum constant.
        private static final IntMap<Alignment> ALIGNMENTS;

        static {
            final Alignment[] constants = values();
            ALIGNMENTS = new IntMap<Alignment>(constants.length);
            for (final Alignment constant : constants) {
                ALIGNMENTS.put(constant.value, constant);
            }
        }
    }
}
apache-2.0
dbmalkovsky/flowable-engine
modules/flowable-event-registry/src/main/java/org/flowable/eventregistry/impl/persistence/entity/data/impl/MybatisChannelDefinitionDataManager.java
8358
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.flowable.eventregistry.impl.persistence.entity.data.impl;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.flowable.common.engine.api.FlowableException;
import org.flowable.eventregistry.api.ChannelDefinition;
import org.flowable.eventregistry.impl.ChannelDefinitionQueryImpl;
import org.flowable.eventregistry.impl.EventRegistryEngineConfiguration;
import org.flowable.eventregistry.impl.persistence.entity.ChannelDefinitionEntity;
import org.flowable.eventregistry.impl.persistence.entity.ChannelDefinitionEntityImpl;
import org.flowable.eventregistry.impl.persistence.entity.data.AbstractEventDataManager;
import org.flowable.eventregistry.impl.persistence.entity.data.ChannelDefinitionDataManager;

/**
 * MyBatis-backed {@link ChannelDefinitionDataManager}: every operation delegates to a
 * named statement executed through the engine's DbSqlSession.
 */
public class MybatisChannelDefinitionDataManager extends AbstractEventDataManager<ChannelDefinitionEntity> implements ChannelDefinitionDataManager {

    public MybatisChannelDefinitionDataManager(EventRegistryEngineConfiguration eventRegistryConfiguration) {
        super(eventRegistryConfiguration);
    }

    @Override
    public Class<? extends ChannelDefinitionEntity> getManagedEntityClass() {
        return ChannelDefinitionEntityImpl.class;
    }

    @Override
    public ChannelDefinitionEntity create() {
        return new ChannelDefinitionEntityImpl();
    }

    @Override
    public ChannelDefinitionEntity findLatestChannelDefinitionByKey(String channelDefinitionKey) {
        return (ChannelDefinitionEntity) getDbSqlSession().selectOne("selectLatestChannelDefinitionByKey", channelDefinitionKey);
    }

    @Override
    public ChannelDefinitionEntity findLatestChannelDefinitionByKeyAndTenantId(String channelDefinitionKey, String tenantId) {
        Map<String, Object> params = new HashMap<>(2);
        params.put("channelDefinitionKey", channelDefinitionKey);
        params.put("tenantId", tenantId);
        return (ChannelDefinitionEntity) getDbSqlSession().selectOne("selectLatestChannelDefinitionByKeyAndTenantId", params);
    }

    @Override
    public ChannelDefinitionEntity findLatestChannelDefinitionByKeyAndParentDeploymentId(String channelDefinitionKey, String parentDeploymentId) {
        Map<String, Object> params = new HashMap<>(2);
        params.put("channelDefinitionKey", channelDefinitionKey);
        params.put("parentDeploymentId", parentDeploymentId);
        return (ChannelDefinitionEntity) getDbSqlSession().selectOne("selectChannelDefinitionByKeyAndParentDeploymentId", params);
    }

    @Override
    public ChannelDefinitionEntity findLatestChannelDefinitionByKeyParentDeploymentIdAndTenantId(String channelDefinitionKey, String parentDeploymentId, String tenantId) {
        Map<String, Object> params = new HashMap<>(2);
        params.put("channelDefinitionKey", channelDefinitionKey);
        params.put("parentDeploymentId", parentDeploymentId);
        params.put("tenantId", tenantId);
        return (ChannelDefinitionEntity) getDbSqlSession().selectOne("selectChannelDefinitionByKeyParentDeploymentIdAndTenantId", params);
    }

    @Override
    public void deleteChannelDefinitionsByDeploymentId(String deploymentId) {
        getDbSqlSession().delete("deleteChannelDefinitionsByDeploymentId", deploymentId, getManagedEntityClass());
    }

    // Renamed parameters from "ChannelDefinitionQuery" to lowerCamelCase: Java parameter
    // names starting with an upper-case letter shadow the type-name convention.
    @Override
    @SuppressWarnings("unchecked")
    public List<ChannelDefinition> findChannelDefinitionsByQueryCriteria(ChannelDefinitionQueryImpl channelDefinitionQuery) {
        return getDbSqlSession().selectList("selectChannelDefinitionsByQueryCriteria", channelDefinitionQuery);
    }

    @Override
    public long findChannelDefinitionCountByQueryCriteria(ChannelDefinitionQueryImpl channelDefinitionQuery) {
        return (Long) getDbSqlSession().selectOne("selectChannelDefinitionCountByQueryCriteria", channelDefinitionQuery);
    }

    @Override
    public ChannelDefinitionEntity findChannelDefinitionByDeploymentAndKey(String deploymentId, String channelDefinitionKey) {
        Map<String, Object> parameters = new HashMap<>();
        parameters.put("deploymentId", deploymentId);
        parameters.put("channelDefinitionKey", channelDefinitionKey);
        return (ChannelDefinitionEntity) getDbSqlSession().selectOne("selectChannelDefinitionByDeploymentAndKey", parameters);
    }

    @Override
    public ChannelDefinitionEntity findChannelDefinitionByDeploymentAndKeyAndTenantId(String deploymentId, String channelDefinitionKey, String tenantId) {
        Map<String, Object> parameters = new HashMap<>();
        parameters.put("deploymentId", deploymentId);
        parameters.put("channelDefinitionKey", channelDefinitionKey);
        parameters.put("tenantId", tenantId);
        return (ChannelDefinitionEntity) getDbSqlSession().selectOne("selectChannelDefinitionByDeploymentAndKeyAndTenantId", parameters);
    }

    @Override
    @SuppressWarnings("unchecked")
    public ChannelDefinitionEntity findChannelDefinitionByKeyAndVersion(String channelDefinitionKey, Integer eventVersion) {
        Map<String, Object> params = new HashMap<>();
        params.put("channelDefinitionKey", channelDefinitionKey);
        params.put("eventVersion", eventVersion);
        List<ChannelDefinitionEntity> results = getDbSqlSession().selectList("selectChannelDefinitionsByKeyAndVersion", params);
        return uniqueResult(results, channelDefinitionKey, eventVersion);
    }

    @Override
    @SuppressWarnings("unchecked")
    public ChannelDefinitionEntity findChannelDefinitionByKeyAndVersionAndTenantId(String channelDefinitionKey, Integer eventVersion, String tenantId) {
        Map<String, Object> params = new HashMap<>();
        params.put("channelDefinitionKey", channelDefinitionKey);
        params.put("eventVersion", eventVersion);
        params.put("tenantId", tenantId);
        List<ChannelDefinitionEntity> results = getDbSqlSession().selectList("selectChannelDefinitionsByKeyAndVersionAndTenantId", params);
        return uniqueResult(results, channelDefinitionKey, eventVersion);
    }

    /**
     * Returns the single result, null when the list is empty, and fails on duplicates.
     * The original message said "event definitions" (copy-paste from the event definition
     * manager); corrected to "channel definitions" since that is what this manager queries.
     */
    private ChannelDefinitionEntity uniqueResult(List<ChannelDefinitionEntity> results, String channelDefinitionKey, Integer eventVersion) {
        if (results.size() == 1) {
            return results.get(0);
        } else if (results.size() > 1) {
            throw new FlowableException("There are " + results.size() + " channel definitions with key = '" + channelDefinitionKey + "' and version = '" + eventVersion + "'.");
        }
        return null;
    }

    @Override
    @SuppressWarnings("unchecked")
    public List<ChannelDefinition> findChannelDefinitionsByNativeQuery(Map<String, Object> parameterMap) {
        return getDbSqlSession().selectListWithRawParameter("selectChannelDefinitionByNativeQuery", parameterMap);
    }

    @Override
    public long findChannelDefinitionCountByNativeQuery(Map<String, Object> parameterMap) {
        return (Long) getDbSqlSession().selectOne("selectChannelDefinitionCountByNativeQuery", parameterMap);
    }

    @Override
    public void updateChannelDefinitionTenantIdForDeployment(String deploymentId, String newTenantId) {
        // Declared as the Map interface (was HashMap) — program to interfaces.
        Map<String, Object> params = new HashMap<>();
        params.put("deploymentId", deploymentId);
        params.put("tenantId", newTenantId);
        getDbSqlSession().update("updateChannelDefinitionTenantIdForDeploymentId", params);
    }

    @Override
    public void updateChannelDefinitionTypeAndImplementation(String channelDefinitionId, String type, String implementation) {
        Map<String, Object> params = new HashMap<>();
        params.put("id", channelDefinitionId);
        params.put("type", type);
        params.put("implementation", implementation);
        getDbSqlSession().update("updateChannelDefinitionTypeAndImplementationById", params);
    }
}
apache-2.0
eBaoTech/pinpoint
profiler/src/main/java/com/navercorp/pinpoint/profiler/instrument/ASMClass.java
29408
/* * Copyright 2016 NAVER Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.navercorp.pinpoint.profiler.instrument; import com.navercorp.pinpoint.bootstrap.instrument.ClassFilter; import com.navercorp.pinpoint.bootstrap.instrument.InstrumentClass; import com.navercorp.pinpoint.bootstrap.instrument.InstrumentContext; import com.navercorp.pinpoint.bootstrap.instrument.InstrumentException; import com.navercorp.pinpoint.bootstrap.instrument.InstrumentMethod; import com.navercorp.pinpoint.bootstrap.instrument.MethodFilter; import com.navercorp.pinpoint.bootstrap.instrument.MethodFilters; import com.navercorp.pinpoint.bootstrap.instrument.NotFoundInstrumentException; import com.navercorp.pinpoint.bootstrap.interceptor.annotation.TargetConstructor; import com.navercorp.pinpoint.bootstrap.interceptor.annotation.TargetConstructors; import com.navercorp.pinpoint.bootstrap.interceptor.annotation.TargetFilter; import com.navercorp.pinpoint.bootstrap.interceptor.annotation.TargetMethod; import com.navercorp.pinpoint.bootstrap.interceptor.annotation.TargetMethods; import com.navercorp.pinpoint.bootstrap.interceptor.scope.ExecutionPolicy; import com.navercorp.pinpoint.bootstrap.interceptor.scope.InterceptorScope; import com.navercorp.pinpoint.bootstrap.plugin.ObjectFactory; import com.navercorp.pinpoint.common.util.Asserts; import com.navercorp.pinpoint.exception.PinpointException; import com.navercorp.pinpoint.profiler.interceptor.registry.InterceptorRegistryBinder; import 
com.navercorp.pinpoint.profiler.metadata.ApiMetaDataService; import com.navercorp.pinpoint.profiler.objectfactory.AutoBindingObjectFactory; import com.navercorp.pinpoint.profiler.objectfactory.InterceptorArgumentProvider; import com.navercorp.pinpoint.profiler.objectfactory.ObjectBinderFactory; import com.navercorp.pinpoint.profiler.util.JavaAssistUtils; import org.objectweb.asm.Opcodes; import org.objectweb.asm.tree.ClassNode; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.Arrays; import java.util.List; /** * @author jaehong.kim */ public class ASMClass implements InstrumentClass { private static final String FIELD_PREFIX = "_$PINPOINT$_"; private final Logger logger = LoggerFactory.getLogger(this.getClass()); private final ObjectBinderFactory objectBinderFactory; private final InstrumentContext pluginContext; private final InterceptorRegistryBinder interceptorRegistryBinder; private final ApiMetaDataService apiMetaDataService; private final ClassLoader classLoader; private final ASMClassNodeAdapter classNode; private boolean modified = false; private String name; public ASMClass(ObjectBinderFactory objectBinderFactory, final InstrumentContext pluginContext, final InterceptorRegistryBinder interceptorRegistryBinder, ApiMetaDataService apiMetaDataService, final ClassLoader classLoader, final ClassNode classNode) { this(objectBinderFactory, pluginContext, interceptorRegistryBinder, apiMetaDataService, classLoader, new ASMClassNodeAdapter(pluginContext, classLoader, classNode)); } public ASMClass(ObjectBinderFactory objectBinderFactory, final InstrumentContext pluginContext, final InterceptorRegistryBinder interceptorRegistryBinder, ApiMetaDataService apiMetaDataService, final ClassLoader classLoader, final ASMClassNodeAdapter classNode) { if (objectBinderFactory == null) { throw new NullPointerException("objectBinderFactory must not be null"); } // if (pluginContext == null) { // throw new 
NullPointerException("pluginContext must not be null"); // } if (apiMetaDataService == null) { throw new NullPointerException("apiMetaDataService must not be null"); } this.objectBinderFactory = objectBinderFactory; this.pluginContext = pluginContext; this.interceptorRegistryBinder = interceptorRegistryBinder; this.apiMetaDataService = apiMetaDataService; this.classLoader = classLoader; this.classNode = classNode; // for performance. this.name = classNode.getName(); } public ClassLoader getClassLoader() { return this.classLoader; } @Override public boolean isInterceptable() { return !isInterface() && !isAnnotation() && !isModified(); } @Override public boolean isInterface() { return this.classNode.isInterface(); } private boolean isAnnotation() { return this.classNode.isAnnotation(); } @Override public String getName() { return this.name; } @Override public String getSuperClass() { return this.classNode.getSuperClassName(); } @Override public String[] getInterfaces() { return this.classNode.getInterfaceNames(); } @Override public InstrumentMethod getDeclaredMethod(final String name, final String... 
parameterTypes) { final String desc = JavaAssistUtils.javaTypeToJvmSignature(parameterTypes); final ASMMethodNodeAdapter methodNode = this.classNode.getDeclaredMethod(name, desc); if (methodNode == null) { return null; } return new ASMMethod(this.objectBinderFactory, this.pluginContext, this.interceptorRegistryBinder, apiMetaDataService, this, methodNode); } @Override public List<InstrumentMethod> getDeclaredMethods() { return getDeclaredMethods(MethodFilters.ACCEPT_ALL); } @Override public List<InstrumentMethod> getDeclaredMethods(final MethodFilter methodFilter) { if (methodFilter == null) { throw new NullPointerException("methodFilter must not be null"); } final List<InstrumentMethod> candidateList = new ArrayList<InstrumentMethod>(); for (ASMMethodNodeAdapter methodNode : this.classNode.getDeclaredMethods()) { final InstrumentMethod method = new ASMMethod(this.objectBinderFactory, this.pluginContext, this.interceptorRegistryBinder, apiMetaDataService, this, methodNode); if (methodFilter.accept(method)) { candidateList.add(method); } } return candidateList; } @Override public InstrumentMethod getConstructor(final String... parameterTypes) { return getDeclaredMethod("<init>", parameterTypes); } @Override public boolean hasDeclaredMethod(final String methodName, final String... parameterTypes) { final String desc = JavaAssistUtils.javaTypeToJvmSignature(parameterTypes); return this.classNode.hasDeclaredMethod(methodName, desc); } @Override public boolean hasMethod(final String methodName, final String... parameterTypes) { final String desc = JavaAssistUtils.javaTypeToJvmSignature(parameterTypes); return this.classNode.hasMethod(methodName, desc); } @Override public boolean hasEnclosingMethod(final String methodName, final String... parameterTypes) { final String desc = JavaAssistUtils.javaTypeToJvmSignature(parameterTypes); return this.classNode.hasOutClass(methodName, desc); } @Override public boolean hasConstructor(final String... 
parameterTypeArray) { return getConstructor(parameterTypeArray) == null ? false : true; } @Override public boolean hasField(String name, String type) { final String desc = type == null ? null : JavaAssistUtils.toJvmSignature(type); return this.classNode.getField(name, desc) != null; } @Override public boolean hasField(String name) { return hasField(name, null); } @Override public void weave(final String adviceClassName) throws InstrumentException { if (adviceClassName == null) { throw new NotFoundInstrumentException("advice class name must not be null"); } final ASMClassNodeAdapter adviceClassNode = ASMClassNodeAdapter.get(this.pluginContext, this.classLoader, JavaAssistUtils.javaNameToJvmName(adviceClassName)); if (adviceClassNode == null) { throw new NotFoundInstrumentException(adviceClassName + " not found."); } final ASMAspectWeaver aspectWeaver = new ASMAspectWeaver(); aspectWeaver.weaving(this.classNode, adviceClassNode); setModified(true); } @Override public InstrumentMethod addDelegatorMethod(final String methodName, final String... paramTypes) throws InstrumentException { // check duplicated method. 
if (getDeclaredMethod(methodName, paramTypes) != null) { throw new InstrumentException(getName() + " already have method(" + methodName + ")."); } final ASMClassNodeAdapter superClassNode = ASMClassNodeAdapter.get(this.pluginContext, this.classLoader, this.classNode.getSuperClassInternalName()); if (superClassNode == null) { throw new NotFoundInstrumentException(getName() + " not found super class(" + this.classNode.getSuperClassInternalName() + ")"); } final String desc = JavaAssistUtils.javaTypeToJvmSignature(paramTypes); final ASMMethodNodeAdapter superMethodNode = superClassNode.getDeclaredMethod(methodName, desc); if (superMethodNode == null) { throw new NotFoundInstrumentException(methodName + desc + " is not found in " + superClassNode.getInternalName()); } final ASMMethodNodeAdapter methodNode = this.classNode.addDelegatorMethod(superMethodNode); setModified(true); return new ASMMethod(this.objectBinderFactory, this.pluginContext, this.interceptorRegistryBinder, apiMetaDataService, this, methodNode); } @Override public void addField(final String accessorTypeName) throws InstrumentException { try { final Class<?> accessorType = this.pluginContext.injectClass(this.classLoader, accessorTypeName); final AccessorAnalyzer accessorAnalyzer = new AccessorAnalyzer(); final AccessorAnalyzer.AccessorDetails accessorDetails = accessorAnalyzer.analyze(accessorType); final ASMFieldNodeAdapter fieldNode = this.classNode.addField(FIELD_PREFIX + JavaAssistUtils.javaClassNameToVariableName(accessorTypeName), accessorDetails.getFieldType()); this.classNode.addInterface(accessorTypeName); this.classNode.addGetterMethod(accessorDetails.getGetter().getName(), fieldNode); this.classNode.addSetterMethod(accessorDetails.getSetter().getName(), fieldNode); setModified(true); } catch (Exception e) { throw new InstrumentException("Failed to add field with accessor [" + accessorTypeName + "]. 
Cause:" + e.getMessage(), e); } } @Override public void addGetter(final String getterTypeName, final String fieldName) throws InstrumentException { try { final Class<?> getterType = this.pluginContext.injectClass(this.classLoader, getterTypeName); final GetterAnalyzer.GetterDetails getterDetails = new GetterAnalyzer().analyze(getterType); final ASMFieldNodeAdapter fieldNode = this.classNode.getField(fieldName, null); if (fieldNode == null) { throw new IllegalArgumentException("Not found field. name=" + fieldName); } final String fieldTypeName = JavaAssistUtils.javaClassNameToObjectName(getterDetails.getFieldType().getName()); if (!fieldNode.getClassName().equals(fieldTypeName)) { throw new IllegalArgumentException("different return type. return=" + fieldTypeName + ", field=" + fieldNode.getClassName()); } this.classNode.addGetterMethod(getterDetails.getGetter().getName(), fieldNode); this.classNode.addInterface(getterTypeName); setModified(true); } catch (Exception e) { throw new InstrumentException("Failed to add getter: " + getterTypeName, e); } } @Override public void addSetter(String setterTypeName, String fieldName) throws InstrumentException { this.addSetter(setterTypeName, fieldName, false); } @Override public void addSetter(String setterTypeName, String fieldName, boolean removeFinal) throws InstrumentException { try { final Class<?> setterType = this.pluginContext.injectClass(this.classLoader, setterTypeName); final SetterAnalyzer.SetterDetails setterDetails = new SetterAnalyzer().analyze(setterType); final ASMFieldNodeAdapter fieldNode = this.classNode.getField(fieldName, null); if (fieldNode == null) { throw new IllegalArgumentException("Not found field. name=" + fieldName); } final String fieldTypeName = JavaAssistUtils.javaClassNameToObjectName(setterDetails.getFieldType().getName()); if (!fieldNode.getClassName().equals(fieldTypeName)) { throw new IllegalArgumentException("Argument type of the setter is different with the field type. 
setterMethod: " + fieldTypeName + ", fieldType: " + fieldNode.getClassName()); } if (fieldNode.isStatic()) { throw new IllegalArgumentException("Cannot add setter to static fields. setterMethod: " + setterDetails.getSetter().getName() + ", fieldName: " + fieldName); } final int original = fieldNode.getAccess(); boolean finalRemoved = false; if (fieldNode.isFinal()) { if (!removeFinal) { throw new IllegalArgumentException("Cannot add setter to final field. setterMethod: " + setterDetails.getSetter().getName() + ", fieldName: " + fieldName); } else { final int removed = original & ~Opcodes.ACC_FINAL; fieldNode.setAccess(removed); finalRemoved = true; } } try { this.classNode.addSetterMethod(setterDetails.getSetter().getName(), fieldNode); this.classNode.addInterface(setterTypeName); setModified(true); } catch (Exception e) { if (finalRemoved) { fieldNode.setAccess(original); } throw e; } } catch (Exception e) { throw new InstrumentException("Failed to add setter: " + setterTypeName, e); } } @Override public int addInterceptor(String interceptorClassName) throws InstrumentException { Asserts.notNull(interceptorClassName, "interceptorClassName"); return addInterceptor0(interceptorClassName, null, null, null); } @Override public int addInterceptor(String interceptorClassName, Object[] constructorArgs) throws InstrumentException { Asserts.notNull(interceptorClassName, "interceptorClassName"); Asserts.notNull(constructorArgs, "constructorArgs"); return addInterceptor0(interceptorClassName, constructorArgs, null, null); } @Override public int addScopedInterceptor(String interceptorClassName, String scopeName) throws InstrumentException { Asserts.notNull(interceptorClassName, "interceptorClassName"); Asserts.notNull(scopeName, "scopeName"); final InterceptorScope interceptorScope = this.pluginContext.getInterceptorScope(scopeName); return addInterceptor0(interceptorClassName, null, interceptorScope, ExecutionPolicy.BOUNDARY); } @Override public int 
addScopedInterceptor(String interceptorClassName, InterceptorScope scope) throws InstrumentException { Asserts.notNull(interceptorClassName, "interceptorClassName"); Asserts.notNull(scope, "scope"); return addInterceptor0(interceptorClassName, null, scope, ExecutionPolicy.BOUNDARY); } @Override public int addScopedInterceptor(String interceptorClassName, Object[] constructorArgs, String scopeName) throws InstrumentException { Asserts.notNull(interceptorClassName, "interceptorClassName"); Asserts.notNull(constructorArgs, "constructorArgs"); Asserts.notNull(scopeName, "scopeName"); final InterceptorScope interceptorScope = this.pluginContext.getInterceptorScope(scopeName); return addInterceptor0(interceptorClassName, constructorArgs, interceptorScope, ExecutionPolicy.BOUNDARY); } @Override public int addScopedInterceptor(String interceptorClassName, Object[] constructorArgs, InterceptorScope scope) throws InstrumentException { Asserts.notNull(interceptorClassName, "interceptorClassName"); Asserts.notNull(constructorArgs, "constructorArgs"); Asserts.notNull(scope, "scope"); return addInterceptor0(interceptorClassName, constructorArgs, scope, ExecutionPolicy.BOUNDARY); } @Override public int addScopedInterceptor(String interceptorClassName, String scopeName, ExecutionPolicy executionPolicy) throws InstrumentException { Asserts.notNull(interceptorClassName, "interceptorClassName"); Asserts.notNull(scopeName, "scopeName"); Asserts.notNull(executionPolicy, "executionPolicy"); final InterceptorScope interceptorScope = this.pluginContext.getInterceptorScope(scopeName); return addInterceptor0(interceptorClassName, null, interceptorScope, executionPolicy); } @Override public int addScopedInterceptor(String interceptorClassName, InterceptorScope scope, ExecutionPolicy executionPolicy) throws InstrumentException { Asserts.notNull(interceptorClassName, "interceptorClassName"); Asserts.notNull(scope, "scope"); Asserts.notNull(executionPolicy, "executionPolicy"); return 
addInterceptor0(interceptorClassName, null, scope, executionPolicy); } @Override public int addScopedInterceptor(String interceptorClassName, Object[] constructorArgs, String scopeName, ExecutionPolicy executionPolicy) throws InstrumentException { Asserts.notNull(interceptorClassName, "interceptorClassName"); Asserts.notNull(constructorArgs, "constructorArgs"); Asserts.notNull(scopeName, "scopeName"); Asserts.notNull(executionPolicy, "executionPolicy"); final InterceptorScope interceptorScope = this.pluginContext.getInterceptorScope(scopeName); return addInterceptor0(interceptorClassName, constructorArgs, interceptorScope, executionPolicy); } @Override public int addScopedInterceptor(String interceptorClassName, Object[] constructorArgs, InterceptorScope scope, ExecutionPolicy executionPolicy) throws InstrumentException { Asserts.notNull(interceptorClassName, "interceptorClassName"); Asserts.notNull(constructorArgs, "constructorArgs"); Asserts.notNull(scope, "scope"); Asserts.notNull(executionPolicy, "executionPolicy"); return addInterceptor0(interceptorClassName, constructorArgs, scope, executionPolicy); } private int addInterceptor0(String interceptorClassName, Object[] constructorArgs, InterceptorScope scope, ExecutionPolicy executionPolicy) throws InstrumentException { int interceptorId = -1; final Class<?> interceptorType = this.pluginContext.injectClass(this.classLoader, interceptorClassName); final TargetMethods targetMethods = interceptorType.getAnnotation(TargetMethods.class); if (targetMethods != null) { for (TargetMethod m : targetMethods.value()) { interceptorId = addInterceptor0(m, interceptorClassName, constructorArgs, scope, executionPolicy); } } final TargetMethod targetMethod = interceptorType.getAnnotation(TargetMethod.class); if (targetMethod != null) { interceptorId = addInterceptor0(targetMethod, interceptorClassName, constructorArgs, scope, executionPolicy); } final TargetConstructors targetConstructors = 
interceptorType.getAnnotation(TargetConstructors.class); if (targetConstructors != null) { for (TargetConstructor c : targetConstructors.value()) { interceptorId = addInterceptor0(c, interceptorClassName, scope, executionPolicy, constructorArgs); } } final TargetConstructor targetConstructor = interceptorType.getAnnotation(TargetConstructor.class); if (targetConstructor != null) { interceptorId = addInterceptor0(targetConstructor, interceptorClassName, scope, executionPolicy, constructorArgs); } final TargetFilter targetFilter = interceptorType.getAnnotation(TargetFilter.class); if (targetFilter != null) { interceptorId = addInterceptor0(targetFilter, interceptorClassName, scope, executionPolicy, constructorArgs); } if (interceptorId == -1) { throw new PinpointException("No target is specified. At least one of @Targets, @TargetMethod, @TargetConstructor, @TargetFilter must present. interceptor: " + interceptorClassName); } return interceptorId; } private int addInterceptor0(TargetConstructor c, String interceptorClassName, InterceptorScope scope, ExecutionPolicy executionPolicy, Object... 
constructorArgs) throws InstrumentException { final InstrumentMethod constructor = getConstructor(c.value()); if (constructor == null) { throw new NotFoundInstrumentException("Cannot find constructor with parameter types: " + Arrays.toString(c.value())); } // TODO casting fix return ((ASMMethod) constructor).addInterceptorInternal(interceptorClassName, constructorArgs, scope, executionPolicy); } private int addInterceptor0(TargetMethod m, String interceptorClassName, Object[] constructorArgs, InterceptorScope scope, ExecutionPolicy executionPolicy) throws InstrumentException { InstrumentMethod method = getDeclaredMethod(m.name(), m.paramTypes()); if (method == null) { throw new NotFoundInstrumentException("Cannot find method " + m.name() + " with parameter types: " + Arrays.toString(m.paramTypes())); } // TODO casting fix return ((ASMMethod) method).addInterceptorInternal(interceptorClassName, constructorArgs, scope, executionPolicy); } private int addInterceptor0(TargetFilter annotation, String interceptorClassName, InterceptorScope scope, ExecutionPolicy executionPolicy, Object[] constructorArgs) throws InstrumentException { final String filterTypeName = annotation.type(); Asserts.notNull(filterTypeName, "type of @TargetFilter"); final InterceptorArgumentProvider interceptorArgumentProvider = objectBinderFactory.newInterceptorArgumentProvider(this); final AutoBindingObjectFactory filterFactory = objectBinderFactory.newAutoBindingObjectFactory(pluginContext, classLoader, interceptorArgumentProvider); final ObjectFactory objectFactory = ObjectFactory.byConstructor(filterTypeName, (Object[]) annotation.constructorArguments()); final MethodFilter filter = (MethodFilter) filterFactory.createInstance(objectFactory); boolean singleton = annotation.singleton(); int interceptorId = -1; for (InstrumentMethod m : getDeclaredMethods(filter)) { if (singleton && interceptorId != -1) { m.addInterceptor(interceptorId); } else { // TODO casting fix interceptorId = ((ASMMethod) 
m).addInterceptorInternal(interceptorClassName, constructorArgs, scope, executionPolicy); } } if (interceptorId == -1) { logger.warn("No methods are intercepted. target: " + this.classNode.getInternalName(), ", interceptor: " + interceptorClassName + ", methodFilter: " + filterTypeName); } return interceptorId; } @Override public int addInterceptor(MethodFilter filter, String interceptorClassName) throws InstrumentException { Asserts.notNull(filter, "filter"); Asserts.notNull(interceptorClassName, "interceptorClassName"); return addScopedInterceptor0(filter, interceptorClassName, null, null, null); } @Override public int addInterceptor(MethodFilter filter, String interceptorClassName, Object[] constructorArgs) throws InstrumentException { Asserts.notNull(filter, "filter"); Asserts.notNull(interceptorClassName, "interceptorClassName"); Asserts.notNull(constructorArgs, "constructorArgs"); return addScopedInterceptor0(filter, interceptorClassName, constructorArgs, null, null); } @Override public int addScopedInterceptor(MethodFilter filter, String interceptorClassName, String scopeName, ExecutionPolicy executionPolicy) throws InstrumentException { Asserts.notNull(filter, "filter"); Asserts.notNull(interceptorClassName, "interceptorClassName"); Asserts.notNull(scopeName, "scopeName"); Asserts.notNull(executionPolicy, "executionPolicy"); final InterceptorScope interceptorScope = this.pluginContext.getInterceptorScope(scopeName); return addScopedInterceptor0(filter, interceptorClassName, null, interceptorScope, executionPolicy); } @Override public int addScopedInterceptor(MethodFilter filter, String interceptorClassName, InterceptorScope scope, ExecutionPolicy executionPolicy) throws InstrumentException { Asserts.notNull(filter, "filter"); Asserts.notNull(interceptorClassName, "interceptorClassName"); Asserts.notNull(scope, "scope"); Asserts.notNull(executionPolicy, "executionPolicy"); return addScopedInterceptor0(filter, interceptorClassName, null, scope, 
executionPolicy); } @Override public int addScopedInterceptor(MethodFilter filter, String interceptorClassName, Object[] constructorArgs, String scopeName, ExecutionPolicy executionPolicy) throws InstrumentException { Asserts.notNull(filter, "filter"); Asserts.notNull(interceptorClassName, "interceptorClassName"); Asserts.notNull(constructorArgs, "constructorArgs"); Asserts.notNull(scopeName, "scopeName"); Asserts.notNull(executionPolicy, "executionPolicy"); final InterceptorScope interceptorScope = this.pluginContext.getInterceptorScope(scopeName); return addScopedInterceptor0(filter, interceptorClassName, null, interceptorScope, executionPolicy); } @Override public int addScopedInterceptor(MethodFilter filter, String interceptorClassName, Object[] constructorArgs, InterceptorScope scope, ExecutionPolicy executionPolicy) throws InstrumentException { Asserts.notNull(filter, "filter"); Asserts.notNull(interceptorClassName, "interceptorClassName"); Asserts.notNull(constructorArgs, "constructorArgs"); Asserts.notNull(scope, "scope"); Asserts.notNull(executionPolicy, "executionPolicy"); return addScopedInterceptor0(filter, interceptorClassName, constructorArgs, scope, executionPolicy); } private int addScopedInterceptor0(MethodFilter filter, String interceptorClassName, Object[] constructorArgs, InterceptorScope scope, ExecutionPolicy executionPolicy) throws InstrumentException { int interceptorId = -1; for (InstrumentMethod m : getDeclaredMethods(filter)) { if (interceptorId != -1) { m.addInterceptor(interceptorId); } else { // TODO casting fix interceptorId = ((ASMMethod) m).addInterceptorInternal(interceptorClassName, constructorArgs, scope, executionPolicy); } } if (interceptorId == -1) { logger.warn("No methods are intercepted. 
target: " + this.classNode.getInternalName(), ", interceptor: " + interceptorClassName + ", methodFilter: " + filter.getClass().getName()); } return interceptorId; } @Override public List<InstrumentClass> getNestedClasses(ClassFilter filter) { final List<InstrumentClass> nestedClasses = new ArrayList<InstrumentClass>(); for (ASMClassNodeAdapter innerClassNode : this.classNode.getInnerClasses()) { final ASMNestedClass nestedClass = new ASMNestedClass(objectBinderFactory, this.pluginContext, this.interceptorRegistryBinder, apiMetaDataService, this.classLoader, innerClassNode); if (filter.accept(nestedClass)) { nestedClasses.add(nestedClass); } } return nestedClasses; } public boolean isModified() { return modified; } public void setModified(boolean modified) { this.modified = modified; } @Override public byte[] toBytecode() { return classNode.toByteArray(); } }
apache-2.0
n-tran/incubator-tinkerpop
gremlin-driver/src/main/java/org/apache/tinkerpop/gremlin/driver/Client.java
23794
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.tinkerpop.gremlin.driver; import org.apache.tinkerpop.gremlin.driver.exception.ConnectionException; import org.apache.tinkerpop.gremlin.driver.message.RequestMessage; import org.apache.tinkerpop.gremlin.process.traversal.TraversalSource; import org.apache.tinkerpop.gremlin.structure.Graph; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.stream.Collectors; /** * A {@code Client} is constructed from a {@link Cluster} and represents a way to send messages to Gremlin Server. * This class itself is a base class as there are different implementations that provide differing kinds of * functionality. See the implementations for specifics on their individual usage. * <p/> * The {@code Client} is designed to be re-used and shared across threads. 
* * @author Stephen Mallette (http://stephen.genoprime.com) */ public abstract class Client { private static final Logger logger = LoggerFactory.getLogger(Client.class); protected final Cluster cluster; protected volatile boolean initialized; Client(final Cluster cluster) { this.cluster = cluster; } /** * Makes any final changes to the builder and returns the constructed {@link RequestMessage}. Implementers * may choose to override this message to append data to the request before sending. By default, this method * will simply call the {@link org.apache.tinkerpop.gremlin.driver.message.RequestMessage.Builder#create()} and return * the {@link RequestMessage}. */ public RequestMessage buildMessage(final RequestMessage.Builder builder) { return builder.create(); } /** * Called in the {@link #init} method. */ protected abstract void initializeImplementation(); /** * Chooses a {@link Connection} to write the message to. */ protected abstract Connection chooseConnection(final RequestMessage msg) throws TimeoutException, ConnectionException; /** * Asynchronous close of the {@code Client}. */ public abstract CompletableFuture<Void> closeAsync(); /** * Create a new {@code Client} that aliases the specified {@link Graph} or {@link TraversalSource} name on the * server to a variable called "g" for the context of the requests made through that {@code Client}. * * @param graphOrTraversalSource rebinds the specified global Gremlin Server variable to "g" * @deprecated As of release 3.1.0, replaced by {@link #alias(String)} */ @Deprecated public abstract Client rebind(final String graphOrTraversalSource); /** * Create a new {@code Client} that aliases the specified {@link Graph} or {@link TraversalSource} name on the * server to a variable called "g" for the context of the requests made through that {@code Client}. 
* * @param graphOrTraversalSource rebinds the specified global Gremlin Server variable to "g" */ public abstract Client alias(final String graphOrTraversalSource); /** * Initializes the client which typically means that a connection is established to the server. Depending on the * implementation and configuration this blocking call may take some time. This method will be called * automatically if it is not called directly and multiple calls will not have effect. */ public synchronized Client init() { if (initialized) return this; logger.debug("Initializing client on cluster [{}]", cluster); cluster.init(); initializeImplementation(); initialized = true; return this; } /** * Submits a Gremlin script to the server and returns a {@link ResultSet} once the write of the request is * complete. * * @param gremlin the gremlin script to execute */ public ResultSet submit(final String gremlin) { return submit(gremlin, null); } /** * Submits a Gremlin script and bound parameters to the server and returns a {@link ResultSet} once the write of * the request is complete. If a script is to be executed repeatedly with slightly different arguments, prefer * this method to concatenating a Gremlin script from dynamically produced strings and sending it to * {@link #submit(String)}. Parameterized scripts will perform better. * * @param gremlin the gremlin script to execute * @param parameters a map of parameters that will be bound to the script on execution */ public ResultSet submit(final String gremlin, final Map<String, Object> parameters) { try { return submitAsync(gremlin, parameters).get(); } catch (Exception ex) { throw new RuntimeException(ex); } } /** * The asynchronous version of {@link #submit(String)} where the returned future will complete when the * write of the request completes. 
* * @param gremlin the gremlin script to execute */ public CompletableFuture<ResultSet> submitAsync(final String gremlin) { return submitAsync(gremlin, null); } /** * The asynchronous version of {@link #submit(String, Map)}} where the returned future will complete when the * write of the request completes. * * @param gremlin the gremlin script to execute * @param parameters a map of parameters that will be bound to the script on execution */ public CompletableFuture<ResultSet> submitAsync(final String gremlin, final Map<String, Object> parameters) { final RequestMessage.Builder request = RequestMessage.build(Tokens.OPS_EVAL) .add(Tokens.ARGS_GREMLIN, gremlin) .add(Tokens.ARGS_BATCH_SIZE, cluster.connectionPoolSettings().resultIterationBatchSize); Optional.ofNullable(parameters).ifPresent(params -> request.addArg(Tokens.ARGS_BINDINGS, parameters)); return submitAsync(buildMessage(request)); } /** * A low-level method that allows the submission of a manually constructed {@link RequestMessage}. */ public CompletableFuture<ResultSet> submitAsync(final RequestMessage msg) { if (!initialized) init(); final CompletableFuture<ResultSet> future = new CompletableFuture<>(); Connection connection = null; try { // the connection is returned to the pool once the response has been completed...see Connection.write() // the connection may be returned to the pool with the host being marked as "unavailable" connection = chooseConnection(msg); connection.write(msg, future); return future; } catch (TimeoutException toe) { // there was a timeout borrowing a connection throw new RuntimeException(toe); } catch (ConnectionException ce) { throw new RuntimeException(ce); } catch (Exception ex) { throw new RuntimeException(ex); } finally { if (logger.isDebugEnabled()) logger.debug("Submitted {} to - {}", msg, null == connection ? "connection not initialized" : connection.toString()); } } /** * Closes the client by making a synchronous call to {@link #closeAsync()}. 
*/ public void close() { closeAsync().join(); } /** * A {@code Client} implementation that does not operate in a session. Requests are sent to multiple servers * given a {@link LoadBalancingStrategy}. Transactions are automatically committed * (or rolled-back on error) after each request. */ public final static class ClusteredClient extends Client { private ConcurrentMap<Host, ConnectionPool> hostConnectionPools = new ConcurrentHashMap<>(); ClusteredClient(final Cluster cluster) { super(cluster); } /** * Submits a Gremlin script to the server and returns a {@link ResultSet} once the write of the request is * complete. * * @param gremlin the gremlin script to execute */ public ResultSet submit(final String gremlin, final String graphOrTraversalSource) { return submit(gremlin, graphOrTraversalSource, null); } /** * Submits a Gremlin script and bound parameters to the server and returns a {@link ResultSet} once the write of * the request is complete. If a script is to be executed repeatedly with slightly different arguments, prefer * this method to concatenating a Gremlin script from dynamically produced strings and sending it to * {@link #submit(String)}. Parameterized scripts will perform better. * * @param gremlin the gremlin script to execute * @param parameters a map of parameters that will be bound to the script on execution * @param graphOrTraversalSource rebinds the specified global Gremlin Server variable to "g" */ public ResultSet submit(final String gremlin, final String graphOrTraversalSource, final Map<String, Object> parameters) { try { return submitAsync(gremlin, graphOrTraversalSource, parameters).get(); } catch (Exception ex) { throw new RuntimeException(ex); } } /** * The asynchronous version of {@link #submit(String, Map)}} where the returned future will complete when the * write of the request completes. 
* * @param gremlin the gremlin script to execute * @param parameters a map of parameters that will be bound to the script on execution * @param graphOrTraversalSource rebinds the specified global Gremlin Server variable to "g" */ public CompletableFuture<ResultSet> submitAsync(final String gremlin, final String graphOrTraversalSource, final Map<String, Object> parameters) { final RequestMessage.Builder request = RequestMessage.build(Tokens.OPS_EVAL) .add(Tokens.ARGS_GREMLIN, gremlin) .add(Tokens.ARGS_BATCH_SIZE, cluster.connectionPoolSettings().resultIterationBatchSize); Optional.ofNullable(parameters).ifPresent(params -> request.addArg(Tokens.ARGS_BINDINGS, parameters)); if (graphOrTraversalSource != null && !graphOrTraversalSource.isEmpty()) request.addArg(Tokens.ARGS_ALIASES, makeRebindings(graphOrTraversalSource)); return submitAsync(buildMessage(request)); } /** * The asynchronous version of {@link #submit(String, Map)}} where the returned future will complete when the * write of the request completes. 
* * @param gremlin the gremlin script to execute * @param parameters a map of parameters that will be bound to the script on execution * @param aliases aliases the specified global Gremlin Server variable some other name that then be used in the * script where the key is the alias name and the value represents the global variable on the * server */ public CompletableFuture<ResultSet> submitAsync(final String gremlin, final Map<String,String> aliases, final Map<String, Object> parameters) { final RequestMessage.Builder request = RequestMessage.build(Tokens.OPS_EVAL) .add(Tokens.ARGS_GREMLIN, gremlin) .add(Tokens.ARGS_BATCH_SIZE, cluster.connectionPoolSettings().resultIterationBatchSize); Optional.ofNullable(parameters).ifPresent(params -> request.addArg(Tokens.ARGS_BINDINGS, parameters)); if (aliases != null && !aliases.isEmpty()) request.addArg(Tokens.ARGS_ALIASES, aliases); return submitAsync(buildMessage(request)); } /** * {@inheritDoc} */ @Override @Deprecated public Client rebind(final String graphOrTraversalSource) { return alias(graphOrTraversalSource); } /** * {@inheritDoc} */ @Override public Client alias(String graphOrTraversalSource) { return new AliasClusteredClient(this, graphOrTraversalSource); } /** * Creates a {@code Client} that supplies the specified set of aliases, thus allowing the user to re-name * one or more globally defined {@link Graph} or {@link TraversalSource} server bindings for the context of * the created {@code Client}. */ @Deprecated public Client rebind(final Map<String,String> rebindings) { return alias(rebindings); } /** * Creates a {@code Client} that supplies the specified set of aliases, thus allowing the user to re-name * one or more globally defined {@link Graph} or {@link TraversalSource} server bindings for the context of * the created {@code Client}. 
*/ public Client alias(final Map<String,String> aliases) { return new AliasClusteredClient(this, aliases); } /** * Uses a {@link LoadBalancingStrategy} to choose the best {@link Host} and then selects the best connection * from that host's connection pool. */ @Override protected Connection chooseConnection(final RequestMessage msg) throws TimeoutException, ConnectionException { final Iterator<Host> possibleHosts = this.cluster.loadBalancingStrategy().select(msg); if (!possibleHosts.hasNext()) throw new TimeoutException("Timed out waiting for an available host."); final Host bestHost = this.cluster.loadBalancingStrategy().select(msg).next(); final ConnectionPool pool = hostConnectionPools.get(bestHost); return pool.borrowConnection(cluster.connectionPoolSettings().maxWaitForConnection, TimeUnit.MILLISECONDS); } /** * Initializes the connection pools on all hosts. */ @Override protected void initializeImplementation() { cluster.allHosts().forEach(host -> { try { // hosts that don't initialize connection pools will come up as a dead host hostConnectionPools.put(host, new ConnectionPool(host, this)); // added a new host to the cluster so let the load-balancer know this.cluster.loadBalancingStrategy().onNew(host); } catch (Exception ex) { // catch connection errors and prevent them from failing the creation logger.warn("Could not initialize connection pool for {} - will try later", host); } }); } /** * Closes all the connection pools on all hosts. 
*/ @Override public CompletableFuture<Void> closeAsync() { final CompletableFuture[] poolCloseFutures = new CompletableFuture[hostConnectionPools.size()]; hostConnectionPools.values().stream().map(ConnectionPool::closeAsync).collect(Collectors.toList()).toArray(poolCloseFutures); return CompletableFuture.allOf(poolCloseFutures); } private Map<String,String> makeRebindings(final String graphOrTraversalSource) { final Map<String,String> rebindings = new HashMap<>(); rebindings.put("g", graphOrTraversalSource); return rebindings; } } /** * Uses a {@link org.apache.tinkerpop.gremlin.driver.Client.ClusteredClient} that rebinds requests to a * specified {@link Graph} or {@link TraversalSource} instances on the server-side. */ public final static class AliasClusteredClient extends ReboundClusteredClient { public AliasClusteredClient(ClusteredClient clusteredClient, String graphOrTraversalSource) { super(clusteredClient, graphOrTraversalSource); } public AliasClusteredClient(ClusteredClient clusteredClient, Map<String, String> rebindings) { super(clusteredClient, rebindings); } } /** * Uses a {@link org.apache.tinkerpop.gremlin.driver.Client.ClusteredClient} that rebinds requests to a * specified {@link Graph} or {@link TraversalSource} instances on the server-side. * * @deprecated As of release 3.1.1-incubating, replaced by {@link AliasClusteredClient}. 
*/ @Deprecated public static class ReboundClusteredClient extends Client { private final ClusteredClient clusteredClient; private final Map<String,String> aliases = new HashMap<>(); final CompletableFuture<Void> close = new CompletableFuture<>(); ReboundClusteredClient(final ClusteredClient clusteredClient, final String graphOrTraversalSource) { super(clusteredClient.cluster); this.clusteredClient = clusteredClient; aliases.put("g", graphOrTraversalSource); } ReboundClusteredClient(final ClusteredClient clusteredClient, final Map<String,String> rebindings) { super(clusteredClient.cluster); this.clusteredClient = clusteredClient; this.aliases.putAll(rebindings); } @Override public synchronized Client init() { if (close.isDone()) throw new IllegalStateException("Client is closed"); // the underlying client may not have been init'd clusteredClient.init(); return this; } @Override public RequestMessage buildMessage(final RequestMessage.Builder builder) { if (close.isDone()) throw new IllegalStateException("Client is closed"); if (!aliases.isEmpty()) builder.addArg(Tokens.ARGS_ALIASES, aliases); return builder.create(); } @Override protected void initializeImplementation() { // no init required if (close.isDone()) throw new IllegalStateException("Client is closed"); } /** * Delegates to the underlying {@link org.apache.tinkerpop.gremlin.driver.Client.ClusteredClient}. */ @Override protected Connection chooseConnection(final RequestMessage msg) throws TimeoutException, ConnectionException { if (close.isDone()) throw new IllegalStateException("Client is closed"); return clusteredClient.chooseConnection(msg); } /** * Prevents messages from being sent from this {@code Client}. Note that calling this method does not call * close on the {@code Client} that created it. 
*/ @Override public CompletableFuture<Void> closeAsync() { close.complete(null); return close; } /** * {@inheritDoc} */ @Override @Deprecated public Client rebind(final String graphOrTraversalSource) { return alias(graphOrTraversalSource); } /** * {@inheritDoc} */ @Override public Client alias(String graphOrTraversalSource) { if (close.isDone()) throw new IllegalStateException("Client is closed"); return new AliasClusteredClient(clusteredClient, graphOrTraversalSource); } } /** * A {@code Client} implementation that operates in the context of a session. Requests are sent to a single * server, where each request is bound to the same thread with the same set of bindings across requests. * Transaction are not automatically committed. It is up the client to issue commit/rollback commands. */ public final static class SessionedClient extends Client { private final String sessionId; private ConnectionPool connectionPool; SessionedClient(final Cluster cluster, final String sessionId) { super(cluster); this.sessionId = sessionId; } String getSessionId() { return sessionId; } /** * The sessioned client does not support this feature. * * @throws UnsupportedOperationException * @deprecated As of release 3.1.0, replaced by {@link #alias(String)} */ @Deprecated @Override public Client rebind(final String graphOrTraversalSourceName){ throw new UnsupportedOperationException("Sessioned client does not support aliasing"); } /** * The sessioned client does not support this feature. * * @throws UnsupportedOperationException */ @Override public Client alias(String graphOrTraversalSource) { throw new UnsupportedOperationException("Sessioned client does not support aliasing"); } /** * Adds the {@link Tokens#ARGS_SESSION} value to every {@link RequestMessage}. 
*/ @Override public RequestMessage buildMessage(final RequestMessage.Builder builder) { builder.processor("session"); builder.addArg(Tokens.ARGS_SESSION, sessionId); return builder.create(); } /** * Since the session is bound to a single host, simply borrow a connection from that pool. */ @Override protected Connection chooseConnection(final RequestMessage msg) throws TimeoutException, ConnectionException { return connectionPool.borrowConnection(cluster.connectionPoolSettings().maxWaitForConnection, TimeUnit.MILLISECONDS); } /** * Randomly choose an available {@link Host} to bind the session too and initialize the {@link ConnectionPool}. */ @Override protected void initializeImplementation() { // chooses an available host at random final List<Host> hosts = cluster.allHosts() .stream().filter(Host::isAvailable).collect(Collectors.toList()); Collections.shuffle(hosts); final Host host = hosts.get(0); connectionPool = new ConnectionPool(host, this, Optional.of(1), Optional.of(1)); } /** * Close the bound {@link ConnectionPool}. */ @Override public CompletableFuture<Void> closeAsync() { return connectionPool.closeAsync(); } } }
apache-2.0
skoulouzis/vlet-1.5.0
source/core/nl.uva.vlet.vfs.irods/irodssrc/edu/sdsc/grid/io/local/LocalFileSystem.java
8545
// Copyright (c) 2005, Regents of the University of California // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // * Neither the name of the University of California, San Diego (UCSD) nor // the names of its contributors may be used to endorse or promote products // derived from this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, // THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
//
//
// FILE
// LocalFileSystem.java - edu.sdsc.grid.io.local.LocalFileSystem
//
// CLASS HIERARCHY
// java.lang.Object
// |
// +-.GeneralFileSystem
// |
// +-.LocalFileSystem
//
// PRINCIPAL AUTHOR
// Lucas Gilbert, SDSC/UCSD
//
//
package edu.sdsc.grid.io.local;

import edu.sdsc.grid.io.*;

import java.io.IOException;
import java.io.FileNotFoundException;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * The LocalFileSystem class is the class for connection implementations to the
 * local file systems. It was added to the GeneralFileSystem tree to provide
 * compatibility and support for remote metadata queries. Unfortunately, local
 * filesystems cannot actually be queried, so both {@code query} overloads
 * throw {@link UnsupportedOperationException}.
 *
 * @author Lucas Gilbert, San Diego Supercomputer Center
 * @since Jargon1.0
 */
public class LocalFileSystem extends GeneralFileSystem {

    /**
     * Default number of records returned by a query.
     */
    static final int DEFAULT_RECORDS_WANTED = 300;

    private static Logger log = LoggerFactory.getLogger(LocalFileSystem.class);

    /**
     * Opens a socket connection to read from and write to. Loads the default
     * Local user account information from their home directory. The account
     * information stored in this object cannot be changed once constructed.
     *<P>
     * This constructor is provided for convenience however, it is recommended
     * that all necessary data be sent to the constructor and not left to the
     * defaults.
     *
     * @throws FileNotFoundException
     *             if the user data file cannot be found.
     * @throws IOException
     *             if an IOException occurs.
     */
    public LocalFileSystem() {
        this(new LocalAccount());
    }

    /**
     * Opens a socket connection to read from and write to. Opens the account
     * held in the LocalAccount object. The account information stored in this
     * object cannot be changed once constructed.
     *<P>
     * This constructor is provided for convenience however, it is recommended
     * that all necessary data be sent to the constructor and not left to the
     * defaults.
     *
     * @param localAccount
     *            the Local account information object; if {@code null} a
     *            default {@link LocalAccount} is used.
     * @throws FileNotFoundException
     *             if the user data file cannot be found.
     * @throws IOException
     *             if an IOException occurs.
     */
    public LocalFileSystem(LocalAccount localAccount) {
        if (localAccount == null)
            account = new LocalAccount();
        else
            account = (LocalAccount) localAccount.clone();
    }

    // ----------------------------------------------------------------------
    // Setters and Getters
    // ----------------------------------------------------------------------
    // General
    /**
     * Sets the <code>account</code> object. A {@code null} argument resets the
     * account to a default {@link LocalAccount}; otherwise a clone of the
     * argument is stored.
     */
    protected void setAccount(GeneralAccount account) {
        // BUG FIX: the null branch previously assigned the method parameter
        // (which shadows the field), leaving this.account unchanged.
        if (account == null)
            this.account = new LocalAccount();
        else
            this.account = (LocalAccount) account.clone();
    }

    /**
     * Returns the root directories of the local file system.
     */
    public String[] getRootDirectories() {
        GeneralFile[] roots = (GeneralFile[]) LocalFile.listRoots();
        String[] rootStrings = new String[roots.length];

        for (int i = 0; i < roots.length; i++) {
            rootStrings[i] = roots[i].toString();
        }

        return rootStrings;
    }

    // ----------------------------------------------------------------------
    // GeneralFileSystem methods
    // ----------------------------------------------------------------------
    /**
     * Queries the file system to find all files that match a set of conditions.
     * For all those that match, the fields indicated in the select array are
     * returned in the result object.
     *
     * @throws UnsupportedOperationException
     *             always; local filesystems cannot be queried.
     */
    public MetaDataRecordList[] query(MetaDataCondition[] conditions,
            MetaDataSelect[] selects) throws IOException {
        throw new UnsupportedOperationException();
    }

    /**
     * Queries the file system to find all files that match a set of conditions.
     * For all those that match, the fields indicated in the select array are
     * returned in the result object.
     *<P>
     * While condition and select array objects have all been checked for
     * self-consistency during their construction, there are additional problems
     * that must be detected at query time:
     *<P>
     * <ul>
     * <li>Redundant selection fields
     * <li>Redundant query fields
     * <li>Fields incompatible with a file system
     * </ul>
     *<P>
     * For instance, it is possible to build a condition object appropriate for
     * the Local system, then pass that object in a local file system query.
     * That will find that the condition is incompatible and generate a mismatch
     * exception.
     *<P>
     * Query is implemented by the file-system-specific classes, like that for
     * the SRB, FTP, etc. Those classes must re-map condition and select field
     * names and operator codes to those required by a particular file system
     * and protocol version. Once re-mapped, they issue the query and get
     * results. The results are then mapped back to the standard public field
     * names of the MetaDataGroups. So, if a MetaDataGroup uses a name like
     * "file path", but the SRB calls it "data name", then query maps first from
     * "file path" to "data name" before issuing the query, and then from
     * "data name" back to "file path" within the results. The programmer using
     * this API should never see the internal field names.
     *
     * @param conditions
     *            The conditional statements that describe the values to query
     *            the system, like WHERE in SQL.
     * @param selects
     *            The attributes to be returned from those values that met the
     *            conditions, like SELECT in SQL.
     * @throws UnsupportedOperationException
     *             always; local filesystems cannot be queried.
     */
    public MetaDataRecordList[] query(MetaDataCondition[] conditions,
            MetaDataSelect[] selects, int recordsWanted) throws IOException {
        throw new UnsupportedOperationException();
    }

    /**
     * Tests this filesystem object for equality with the given object. Returns
     * <code>true</code> if and only if the argument is not <code>null</code>
     * and both are filesystem objects connected to the same filesystem using
     * the same account information.
     *
     * @param obj
     *            The object to be compared with this abstract pathname
     *
     * @return <code>true</code> if and only if the objects are the same;
     *         <code>false</code> otherwise
     */
    public boolean equals(Object obj) {
        // instanceof replaces the former catch (ClassCastException) idiom;
        // it also handles the null case.
        if (!(obj instanceof LocalFileSystem)) {
            return false;
        }
        LocalFileSystem temp = (LocalFileSystem) obj;
        return getAccount().equals(temp.getAccount());
    }

    /**
     * Tests the connection to the filesystem. Local always returns true.
     */
    public boolean isConnected() {
        return true;
    }
}
apache-2.0
alinvasile/httpcore
httpcore/src/main/java/org/apache/http/impl/NoConnectionReuseStrategy.java
2029
/*
 * $HeadURL$
 * $Revision$
 * $Date$
 *
 * ====================================================================
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 * ====================================================================
 *
 * This software consists of voluntary contributions made by many
 * individuals on behalf of the Apache Software Foundation.  For more
 * information on the Apache Software Foundation, please see
 * <http://www.apache.org/>.
 *
 */

package org.apache.http.impl;

import org.apache.http.ConnectionReuseStrategy;
import org.apache.http.HttpResponse;
import org.apache.http.protocol.HttpContext;

/**
 * A strategy that never re-uses a connection: {@link #keepAlive} returns
 * {@code false} for every response, regardless of its headers or protocol
 * version. Useful to force a fresh connection per exchange.
 *
 * @version $Revision$
 *
 * @since 4.0
 */
public class NoConnectionReuseStrategy implements ConnectionReuseStrategy {

    // default constructor (no state to initialize)

    /**
     * Decides whether the connection may be kept alive. This implementation
     * only validates its arguments and always answers no.
     *
     * @param response the response on the connection; must not be {@code null}
     * @param context  the execution context; must not be {@code null}
     * @return {@code false}, always
     * @throws IllegalArgumentException if either argument is {@code null}
     */
    public boolean keepAlive(final HttpResponse response, final HttpContext context) {
        if (response == null) {
            throw new IllegalArgumentException("HTTP response may not be null");
        }
        if (context == null) {
            throw new IllegalArgumentException("HTTP context may not be null");
        }
        return false;
    }
}
apache-2.0
zanebenefits/jdbi
src/main/java/org/skife/jdbi/v2/BeanMapper.java
3996
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.skife.jdbi.v2; import org.skife.jdbi.v2.tweak.ResultColumnMapper; import org.skife.jdbi.v2.tweak.ResultSetMapper; import java.beans.BeanInfo; import java.beans.IntrospectionException; import java.beans.Introspector; import java.beans.PropertyDescriptor; import java.lang.reflect.InvocationTargetException; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.util.HashMap; import java.util.Map; /** * A result set mapper which maps the fields in a statement into a JavaBean. This uses * the JDK's built in bean mapping facilities, so it does not support nested properties. 
*/ public class BeanMapper<T> implements ResultSetMapper<T> { private final Class<T> type; private final Map<String, PropertyDescriptor> properties = new HashMap<String, PropertyDescriptor>(); public BeanMapper(Class<T> type) { this.type = type; try { BeanInfo info = Introspector.getBeanInfo(type); for (PropertyDescriptor descriptor : info.getPropertyDescriptors()) { properties.put(descriptor.getName().toLowerCase(), descriptor); } } catch (IntrospectionException e) { throw new IllegalArgumentException(e); } } @Override @SuppressWarnings({"unchecked", "rawtypes"}) public T map(int row, ResultSet rs, StatementContext ctx) throws SQLException { T bean; try { bean = type.newInstance(); } catch (Exception e) { throw new IllegalArgumentException(String.format("A bean, %s, was mapped " + "which was not instantiable", type.getName()), e); } ResultSetMetaData metadata = rs.getMetaData(); for (int i = 1; i <= metadata.getColumnCount(); ++i) { String name = metadata.getColumnLabel(i).toLowerCase(); PropertyDescriptor descriptor = properties.get(name); if (descriptor != null) { Class type = descriptor.getPropertyType(); Object value; ResultColumnMapper mapper = ctx.columnMapperFor(type); if (mapper != null) { value = mapper.mapColumn(rs, i, ctx); } else { value = rs.getObject(i); } try { descriptor.getWriteMethod().invoke(bean, value); } catch (IllegalAccessException e) { throw new IllegalArgumentException(String.format("Unable to access setter for " + "property, %s", name), e); } catch (InvocationTargetException e) { throw new IllegalArgumentException(String.format("Invocation target exception trying to " + "invoker setter for the %s property", name), e); } catch (NullPointerException e) { throw new IllegalArgumentException(String.format("No appropriate method to " + "write property %s", name), e); } } } return bean; } }
apache-2.0
tkobayas/optaplanner
optaplanner-core/src/test/java/org/optaplanner/core/impl/solver/termination/OrCompositeTerminationTest.java
8577
/*
 * Copyright 2020 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.optaplanner.core.impl.solver.termination;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.data.Offset.offset;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.Arrays;

import org.junit.jupiter.api.Test;
import org.optaplanner.core.impl.phase.scope.AbstractPhaseScope;
import org.optaplanner.core.impl.solver.scope.SolverScope;

/**
 * Tests for {@link OrCompositeTermination}: the composite terminates as soon
 * as ANY child termination triggers, and its time gradient is the maximum of
 * the children's gradients (negative gradients mean "unsupported" and are
 * ignored, defaulting to 0.0 when none apply).
 */
public class OrCompositeTerminationTest {

    @Test
    public void solveTermination() {
        Termination terminationA = mock(Termination.class);
        Termination terminationB = mock(Termination.class);
        // Exercises the varargs constructor.
        Termination composite = new OrCompositeTermination(terminationA, terminationB);

        SolverScope scope = mock(SolverScope.class);

        // false OR false = false
        when(terminationA.isSolverTerminated(scope)).thenReturn(false);
        when(terminationB.isSolverTerminated(scope)).thenReturn(false);
        assertThat(composite.isSolverTerminated(scope)).isFalse();

        // true OR false = true
        when(terminationA.isSolverTerminated(scope)).thenReturn(true);
        when(terminationB.isSolverTerminated(scope)).thenReturn(false);
        assertThat(composite.isSolverTerminated(scope)).isTrue();

        // false OR true = true
        when(terminationA.isSolverTerminated(scope)).thenReturn(false);
        when(terminationB.isSolverTerminated(scope)).thenReturn(true);
        assertThat(composite.isSolverTerminated(scope)).isTrue();

        // true OR true = true
        when(terminationA.isSolverTerminated(scope)).thenReturn(true);
        when(terminationB.isSolverTerminated(scope)).thenReturn(true);
        assertThat(composite.isSolverTerminated(scope)).isTrue();
    }

    @Test
    public void phaseTermination() {
        Termination terminationA = mock(Termination.class);
        Termination terminationB = mock(Termination.class);
        // Exercises the collection-based constructor.
        Termination composite = new OrCompositeTermination(Arrays.asList(terminationA, terminationB));

        AbstractPhaseScope scope = mock(AbstractPhaseScope.class);

        // false OR false = false
        when(terminationA.isPhaseTerminated(scope)).thenReturn(false);
        when(terminationB.isPhaseTerminated(scope)).thenReturn(false);
        assertThat(composite.isPhaseTerminated(scope)).isFalse();

        // true OR false = true
        when(terminationA.isPhaseTerminated(scope)).thenReturn(true);
        when(terminationB.isPhaseTerminated(scope)).thenReturn(false);
        assertThat(composite.isPhaseTerminated(scope)).isTrue();

        // false OR true = true
        when(terminationA.isPhaseTerminated(scope)).thenReturn(false);
        when(terminationB.isPhaseTerminated(scope)).thenReturn(true);
        assertThat(composite.isPhaseTerminated(scope)).isTrue();

        // true OR true = true
        when(terminationA.isPhaseTerminated(scope)).thenReturn(true);
        when(terminationB.isPhaseTerminated(scope)).thenReturn(true);
        assertThat(composite.isPhaseTerminated(scope)).isTrue();
    }

    @Test
    public void calculateSolverTimeGradientTest() {
        Termination terminationA = mock(Termination.class);
        Termination terminationB = mock(Termination.class);
        Termination composite = new OrCompositeTermination(Arrays.asList(terminationA, terminationB));

        SolverScope scope = mock(SolverScope.class);

        // max(0.0, 0.0) = 0.0
        when(terminationA.calculateSolverTimeGradient(scope)).thenReturn(0.0);
        when(terminationB.calculateSolverTimeGradient(scope)).thenReturn(0.0);
        assertThat(composite.calculateSolverTimeGradient(scope)).isEqualTo(0.0, offset(0.0));

        // max(0.5, 0.0) = 0.5
        when(terminationA.calculateSolverTimeGradient(scope)).thenReturn(0.5);
        when(terminationB.calculateSolverTimeGradient(scope)).thenReturn(0.0);
        assertThat(composite.calculateSolverTimeGradient(scope)).isEqualTo(0.5, offset(0.0));

        // max(0.0, 0.5) = 0.5
        when(terminationA.calculateSolverTimeGradient(scope)).thenReturn(0.0);
        when(terminationB.calculateSolverTimeGradient(scope)).thenReturn(0.5);
        assertThat(composite.calculateSolverTimeGradient(scope)).isEqualTo(0.5, offset(0.0));

        // Negative gradients are unsupported and ignored:
        // max(unsupported, unsupported) = 0.0 (the default)
        when(terminationA.calculateSolverTimeGradient(scope)).thenReturn(-1.0);
        when(terminationB.calculateSolverTimeGradient(scope)).thenReturn(-1.0);
        assertThat(composite.calculateSolverTimeGradient(scope)).isEqualTo(0.0, offset(0.0));

        // max(0.5, unsupported) = 0.5
        when(terminationA.calculateSolverTimeGradient(scope)).thenReturn(0.5);
        when(terminationB.calculateSolverTimeGradient(scope)).thenReturn(-1.0);
        assertThat(composite.calculateSolverTimeGradient(scope)).isEqualTo(0.5, offset(0.0));

        // max(unsupported, 0.5) = 0.5
        when(terminationA.calculateSolverTimeGradient(scope)).thenReturn(-1.0);
        when(terminationB.calculateSolverTimeGradient(scope)).thenReturn(0.5);
        assertThat(composite.calculateSolverTimeGradient(scope)).isEqualTo(0.5, offset(0.0));
    }

    @Test
    public void calculatePhaseTimeGradientTest() {
        Termination terminationA = mock(Termination.class);
        Termination terminationB = mock(Termination.class);
        Termination composite = new OrCompositeTermination(Arrays.asList(terminationA, terminationB));

        AbstractPhaseScope scope = mock(AbstractPhaseScope.class);

        // max(0.0, 0.0) = 0.0
        when(terminationA.calculatePhaseTimeGradient(scope)).thenReturn(0.0);
        when(terminationB.calculatePhaseTimeGradient(scope)).thenReturn(0.0);
        assertThat(composite.calculatePhaseTimeGradient(scope)).isEqualTo(0.0, offset(0.0));

        // max(0.5, 0.0) = 0.5
        when(terminationA.calculatePhaseTimeGradient(scope)).thenReturn(0.5);
        when(terminationB.calculatePhaseTimeGradient(scope)).thenReturn(0.0);
        assertThat(composite.calculatePhaseTimeGradient(scope)).isEqualTo(0.5, offset(0.0));

        // max(0.0, 0.5) = 0.5
        when(terminationA.calculatePhaseTimeGradient(scope)).thenReturn(0.0);
        when(terminationB.calculatePhaseTimeGradient(scope)).thenReturn(0.5);
        assertThat(composite.calculatePhaseTimeGradient(scope)).isEqualTo(0.5, offset(0.0));

        // Negative gradients are unsupported and ignored:
        // max(unsupported, unsupported) = 0.0 (the default)
        when(terminationA.calculatePhaseTimeGradient(scope)).thenReturn(-1.0);
        when(terminationB.calculatePhaseTimeGradient(scope)).thenReturn(-1.0);
        assertThat(composite.calculatePhaseTimeGradient(scope)).isEqualTo(0.0, offset(0.0));

        // max(0.5, unsupported) = 0.5
        when(terminationA.calculatePhaseTimeGradient(scope)).thenReturn(0.5);
        when(terminationB.calculatePhaseTimeGradient(scope)).thenReturn(-1.0);
        assertThat(composite.calculatePhaseTimeGradient(scope)).isEqualTo(0.5, offset(0.0));

        // max(unsupported, 0.5) = 0.5
        when(terminationA.calculatePhaseTimeGradient(scope)).thenReturn(-1.0);
        when(terminationB.calculatePhaseTimeGradient(scope)).thenReturn(0.5);
        assertThat(composite.calculatePhaseTimeGradient(scope)).isEqualTo(0.5, offset(0.0));
    }
}
apache-2.0
square/retrofit
retrofit-adapters/rxjava2/src/test/java/retrofit2/adapter/rxjava2/ObservableThrowingTest.java
10650
/*
 * Copyright (C) 2015 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package retrofit2.adapter.rxjava2;

import static okhttp3.mockwebserver.SocketPolicy.DISCONNECT_AFTER_REQUEST;
import static org.assertj.core.api.Assertions.assertThat;

import io.reactivex.Observable;
import io.reactivex.Observer;
import io.reactivex.disposables.Disposable;
import io.reactivex.exceptions.CompositeException;
import io.reactivex.exceptions.Exceptions;
import io.reactivex.exceptions.UndeliverableException;
import io.reactivex.plugins.RxJavaPlugins;
import java.util.concurrent.atomic.AtomicReference;
import okhttp3.mockwebserver.MockResponse;
import okhttp3.mockwebserver.MockWebServer;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestRule;
import retrofit2.Response;
import retrofit2.Retrofit;
import retrofit2.http.GET;

/**
 * Verifies how exceptions thrown from user {@link Observer} callbacks are
 * routed for each of the three {@code Observable} shapes (body, response,
 * result): an exception from {@code onNext} is delivered to {@code onError},
 * while an exception from {@code onComplete}/{@code onError} cannot be
 * delivered downstream and goes to the RxJava plugin error handler instead.
 */
public final class ObservableThrowingTest {
  @Rule public final MockWebServer server = new MockWebServer();
  // Restores the global RxJavaPlugins error handler after each test.
  @Rule public final TestRule resetRule = new RxJavaPluginsResetRule();
  @Rule public final RecordingObserver.Rule subscriberRule = new RecordingObserver.Rule();

  interface Service {
    @GET("/")
    Observable<String> body();

    @GET("/")
    Observable<Response<String>> response();

    @GET("/")
    Observable<Result<String>> result();
  }

  private Service service;

  @Before
  public void setUp() {
    Retrofit retrofit =
        new Retrofit.Builder()
            .baseUrl(server.url("/"))
            .addConverterFactory(new StringConverterFactory())
            .addCallAdapterFactory(RxJava2CallAdapterFactory.create())
            .build();
    service = retrofit.create(Service.class);
  }

  @Test
  public void bodyThrowingInOnNextDeliveredToError() {
    server.enqueue(new MockResponse());

    RecordingObserver<String> observer = subscriberRule.create();
    final RuntimeException e = new RuntimeException();
    service
        .body()
        .subscribe(
            new ForwardingObserver<String>(observer) {
              @Override
              public void onNext(String value) {
                throw e;
              }
            });

    observer.assertError(e);
  }

  @Test
  public void bodyThrowingInOnCompleteDeliveredToPlugin() {
    server.enqueue(new MockResponse());

    final AtomicReference<Throwable> throwableRef = new AtomicReference<>();
    RxJavaPlugins.setErrorHandler(
        throwable -> {
          // Record only the first throwable; any second one is unexpected.
          if (!throwableRef.compareAndSet(null, throwable)) {
            throw Exceptions.propagate(throwable);
          }
        });

    RecordingObserver<String> observer = subscriberRule.create();
    final RuntimeException e = new RuntimeException();
    service
        .body()
        .subscribe(
            new ForwardingObserver<String>(observer) {
              @Override
              public void onComplete() {
                throw e;
              }
            });

    observer.assertAnyValue();
    assertThat(throwableRef.get()).isInstanceOf(UndeliverableException.class).hasCause(e);
  }

  @Test
  public void bodyThrowingInOnErrorDeliveredToPlugin() {
    server.enqueue(new MockResponse().setResponseCode(404));

    final AtomicReference<Throwable> throwableRef = new AtomicReference<>();
    RxJavaPlugins.setErrorHandler(
        throwable -> {
          if (!throwableRef.compareAndSet(null, throwable)) {
            throw Exceptions.propagate(throwable);
          }
        });

    RecordingObserver<String> observer = subscriberRule.create();
    final AtomicReference<Throwable> errorRef = new AtomicReference<>();
    final RuntimeException e = new RuntimeException();
    service
        .body()
        .subscribe(
            new ForwardingObserver<String>(observer) {
              @Override
              public void onError(Throwable throwable) {
                if (!errorRef.compareAndSet(null, throwable)) {
                  throw Exceptions.propagate(throwable);
                }
                throw e;
              }
            });

    // Both the original error and the one thrown from onError arrive at the
    // plugin handler wrapped together in a CompositeException.
    //noinspection ThrowableResultOfMethodCallIgnored
    CompositeException composite = (CompositeException) throwableRef.get();
    assertThat(composite.getExceptions()).containsExactly(errorRef.get(), e);
  }

  @Test
  public void responseThrowingInOnNextDeliveredToError() {
    server.enqueue(new MockResponse());

    RecordingObserver<Response<String>> observer = subscriberRule.create();
    final RuntimeException e = new RuntimeException();
    service
        .response()
        .subscribe(
            new ForwardingObserver<Response<String>>(observer) {
              @Override
              public void onNext(Response<String> value) {
                throw e;
              }
            });

    observer.assertError(e);
  }

  @Test
  public void responseThrowingInOnCompleteDeliveredToPlugin() {
    server.enqueue(new MockResponse());

    final AtomicReference<Throwable> throwableRef = new AtomicReference<>();
    RxJavaPlugins.setErrorHandler(
        throwable -> {
          if (!throwableRef.compareAndSet(null, throwable)) {
            throw Exceptions.propagate(throwable);
          }
        });

    RecordingObserver<Response<String>> observer = subscriberRule.create();
    final RuntimeException e = new RuntimeException();
    service
        .response()
        .subscribe(
            new ForwardingObserver<Response<String>>(observer) {
              @Override
              public void onComplete() {
                throw e;
              }
            });

    observer.assertAnyValue();
    assertThat(throwableRef.get()).isInstanceOf(UndeliverableException.class).hasCause(e);
  }

  @Test
  public void responseThrowingInOnErrorDeliveredToPlugin() {
    // Network failure triggers the observable's own onError path.
    server.enqueue(new MockResponse().setSocketPolicy(DISCONNECT_AFTER_REQUEST));

    final AtomicReference<Throwable> throwableRef = new AtomicReference<>();
    RxJavaPlugins.setErrorHandler(
        throwable -> {
          if (!throwableRef.compareAndSet(null, throwable)) {
            throw Exceptions.propagate(throwable);
          }
        });

    RecordingObserver<Response<String>> observer = subscriberRule.create();
    final AtomicReference<Throwable> errorRef = new AtomicReference<>();
    final RuntimeException e = new RuntimeException();
    service
        .response()
        .subscribe(
            new ForwardingObserver<Response<String>>(observer) {
              @Override
              public void onError(Throwable throwable) {
                if (!errorRef.compareAndSet(null, throwable)) {
                  throw Exceptions.propagate(throwable);
                }
                throw e;
              }
            });

    //noinspection ThrowableResultOfMethodCallIgnored
    CompositeException composite = (CompositeException) throwableRef.get();
    assertThat(composite.getExceptions()).containsExactly(errorRef.get(), e);
  }

  @Test
  public void resultThrowingInOnNextDeliveredToError() {
    server.enqueue(new MockResponse());

    RecordingObserver<Result<String>> observer = subscriberRule.create();
    final RuntimeException e = new RuntimeException();
    service
        .result()
        .subscribe(
            new ForwardingObserver<Result<String>>(observer) {
              @Override
              public void onNext(Result<String> value) {
                throw e;
              }
            });

    observer.assertError(e);
  }

  @Test
  public void resultThrowingInOnCompletedDeliveredToPlugin() {
    server.enqueue(new MockResponse());

    final AtomicReference<Throwable> throwableRef = new AtomicReference<>();
    RxJavaPlugins.setErrorHandler(
        throwable -> {
          if (!throwableRef.compareAndSet(null, throwable)) {
            throw Exceptions.propagate(throwable);
          }
        });

    RecordingObserver<Result<String>> observer = subscriberRule.create();
    final RuntimeException e = new RuntimeException();
    service
        .result()
        .subscribe(
            new ForwardingObserver<Result<String>>(observer) {
              @Override
              public void onComplete() {
                throw e;
              }
            });

    observer.assertAnyValue();
    assertThat(throwableRef.get()).isInstanceOf(UndeliverableException.class).hasCause(e);
  }

  @Test
  public void resultThrowingInOnErrorDeliveredToPlugin() {
    server.enqueue(new MockResponse());

    final AtomicReference<Throwable> throwableRef = new AtomicReference<>();
    RxJavaPlugins.setErrorHandler(
        throwable -> {
          if (!throwableRef.compareAndSet(null, throwable)) {
            throw Exceptions.propagate(throwable);
          }
        });

    RecordingObserver<Result<String>> observer = subscriberRule.create();
    final RuntimeException first = new RuntimeException();
    final RuntimeException second = new RuntimeException();
    service
        .result()
        .subscribe(
            new ForwardingObserver<Result<String>>(observer) {
              @Override
              public void onNext(Result<String> value) {
                // The only way to trigger onError for a result is if onNext throws.
                throw first;
              }

              @Override
              public void onError(Throwable throwable) {
                throw second;
              }
            });

    //noinspection ThrowableResultOfMethodCallIgnored
    CompositeException composite = (CompositeException) throwableRef.get();
    assertThat(composite.getExceptions()).containsExactly(first, second);
  }

  /**
   * Delegates every callback to the wrapped observer; tests subclass it and
   * override a single callback to inject a thrown exception.
   */
  private abstract static class ForwardingObserver<T> implements Observer<T> {
    private final Observer<T> delegate;

    ForwardingObserver(Observer<T> delegate) {
      this.delegate = delegate;
    }

    @Override
    public void onSubscribe(Disposable disposable) {
      delegate.onSubscribe(disposable);
    }

    @Override
    public void onNext(T value) {
      delegate.onNext(value);
    }

    @Override
    public void onError(Throwable throwable) {
      delegate.onError(throwable);
    }

    @Override
    public void onComplete() {
      delegate.onComplete();
    }
  }
}
apache-2.0
korrelate/pig
src/org/apache/pig/pen/FetchBaseData.java
3455
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.pig.pen;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.apache.pig.backend.executionengine.ExecPhysicalOperator;
import org.apache.pig.backend.local.executionengine.POLoad;
import org.apache.pig.data.BagFactory;
import org.apache.pig.data.DataBag;
import org.apache.pig.data.Tuple;
import org.apache.pig.impl.PigContext;
import org.apache.pig.impl.io.FileSpec;
import org.apache.pig.impl.logicalLayer.LOLoad;
import org.apache.pig.impl.logicalLayer.LogicalOperator;
import org.apache.pig.impl.logicalLayer.OperatorKey;
import org.apache.pig.impl.logicalLayer.parser.NodeIdGenerator;
import org.apache.pig.impl.util.LineageTracer;

/**
 * Fetches a sample of base data for the illustrate/example-generation feature:
 * walks a logical plan down to its LOLoad leaves and reads up to
 * {@code sampleSize} tuples from each load into {@code baseData}.
 */
public class FetchBaseData {

    /**
     * Recursively populates {@code baseData} with up to {@code sampleSize}
     * tuples for every {@link LOLoad} reachable from {@code op}.
     *
     * @param op the logical operator to start from
     * @param baseData map being filled; acts as a cache — a load that already
     *                 has an entry is not read again
     * @param sampleSize maximum number of tuples to read per load
     * @param pigContext execution context used to construct the physical load
     * @throws IOException if {@code baseData} is null, the load has no
     *                     user-defined schema, or reading fails
     */
    static void ReadBaseData(LogicalOperator op, Map<LOLoad, DataBag> baseData,
            int sampleSize, PigContext pigContext) throws IOException {
        if (baseData == null) {
            throw new IOException("BaseData is null!");
        }
        DataBag test = baseData.get(op);
        if(test != null) {
            //The data exists locally and need not be fetched again
            return;
        }
        if (op instanceof LOLoad) {
            FileSpec fileSpec = ((LOLoad)op).getInputFileSpec();
            // Illustrate requires a user-supplied schema on the load statement.
            if(op.outputSchema().fields.isEmpty()) {
                throw new IOException("Illustrate command needs a user defined schema to function. Please specify a schema while loading the data.");
            }
            DataBag opBaseData = BagFactory.getInstance().newDefaultBag();

            // Build a local physical load operator to actually read the file.
            //POLoad poLoad = new POLoad(pigContext, ((LOLoad) op).getInputFileSpec(), op.getOutputType());
            POLoad poLoad = new POLoad(op.getScope(),
                                       NodeIdGenerator.getGenerator().getNextNodeId(op.getOperatorKey().getScope()),
                                       new HashMap<OperatorKey, ExecPhysicalOperator> (),
                                       pigContext,
                                       fileSpec,
                                       LogicalOperator.FIXED
                                       );
            poLoad.setLineageTracer(new LineageTracer());

            poLoad.open();
            // Pull at most sampleSize tuples; getNext() returning null marks
            // end of input.
            for (int i = 0; i < sampleSize; i++) {
                Tuple t = poLoad.getNext();
                if (t == null)
                    break;
                opBaseData.add(t);
            }
            poLoad.close();

            baseData.put((LOLoad) op, opBaseData);
        } else {
            // Not a load: recurse into every input of this operator.
            /*for (Iterator<LogicalOperator> it = op.getInputs().iterator(); it.hasNext(); ) {
                ReadBaseData(it.next(), baseData, sampleSize, pigContext);
            }*/
            for(OperatorKey opKey : op.getInputs()) {
                ReadBaseData(op.getOpTable().get(opKey), baseData, sampleSize, pigContext);
            }
        }
    }
}
apache-2.0
medicayun/medicayundicom
dcm4chee-xds-infoset/trunk/dcm4chee-xds-infoset-v21/target/generated-sources/wsimport/org/dcm4chee/xds/infoset/v21/RootElement.java
3459
package org.dcm4chee.xds.infoset.v21;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;


/**
 * <p>Java class for anonymous complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType>
 *   &lt;complexContent>
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       &lt;choice>
 *         &lt;element ref="{urn:oasis:names:tc:ebxml-regrep:registry:xsd:2.1}SubmitObjectsRequest"/>
 *         &lt;element ref="{urn:oasis:names:tc:ebxml-regrep:query:xsd:2.1}AdhocQueryRequest"/>
 *         &lt;element ref="{urn:oasis:names:tc:ebxml-regrep:registry:xsd:2.1}RegistryResponse"/>
 *       &lt;/choice>
 *     &lt;/restriction>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 *
 * <p>NOTE: per the schema {@code choice} above, only one of the three
 * properties is expected to be populated on any given instance; the other
 * two remain {@code null}. This class appears to be JAXB/wsimport-generated
 * binding code, so edits here are likely to be overwritten on regeneration.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "submitObjectsRequest",
    "adhocQueryRequest",
    "registryResponse"
})
@XmlRootElement(name = "RootElement", namespace = "urn:oasis:names:tc:ebxml-regrep:registry:xsd:2.1")
public class RootElement {

    // One of the three ebXML registry/query payloads (mutually exclusive by schema choice).
    @XmlElement(name = "SubmitObjectsRequest", namespace = "urn:oasis:names:tc:ebxml-regrep:registry:xsd:2.1")
    protected SubmitObjectsRequest submitObjectsRequest;
    @XmlElement(name = "AdhocQueryRequest", namespace = "urn:oasis:names:tc:ebxml-regrep:query:xsd:2.1")
    protected AdhocQueryRequest adhocQueryRequest;
    @XmlElement(name = "RegistryResponse", namespace = "urn:oasis:names:tc:ebxml-regrep:registry:xsd:2.1")
    protected RegistryResponse registryResponse;

    /**
     * Gets the value of the submitObjectsRequest property.
     *
     * @return
     *     possible object is
     *     {@link SubmitObjectsRequest }
     *
     */
    public SubmitObjectsRequest getSubmitObjectsRequest() {
        return submitObjectsRequest;
    }

    /**
     * Sets the value of the submitObjectsRequest property.
     *
     * @param value
     *     allowed object is
     *     {@link SubmitObjectsRequest }
     *
     */
    public void setSubmitObjectsRequest(SubmitObjectsRequest value) {
        this.submitObjectsRequest = value;
    }

    /**
     * Gets the value of the adhocQueryRequest property.
     *
     * @return
     *     possible object is
     *     {@link AdhocQueryRequest }
     *
     */
    public AdhocQueryRequest getAdhocQueryRequest() {
        return adhocQueryRequest;
    }

    /**
     * Sets the value of the adhocQueryRequest property.
     *
     * @param value
     *     allowed object is
     *     {@link AdhocQueryRequest }
     *
     */
    public void setAdhocQueryRequest(AdhocQueryRequest value) {
        this.adhocQueryRequest = value;
    }

    /**
     * Gets the value of the registryResponse property.
     *
     * @return
     *     possible object is
     *     {@link RegistryResponse }
     *
     */
    public RegistryResponse getRegistryResponse() {
        return registryResponse;
    }

    /**
     * Sets the value of the registryResponse property.
     *
     * @param value
     *     allowed object is
     *     {@link RegistryResponse }
     *
     */
    public void setRegistryResponse(RegistryResponse value) {
        this.registryResponse = value;
    }

}
apache-2.0
qmwu2000/cat2
cat-plugin-transactions/src/main/java/org/unidal/cat/plugin/transactions/report/view/NameViewModel.java
9907
package org.unidal.cat.plugin.transactions.report.view;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.unidal.cat.core.view.PieChart;
import org.unidal.cat.core.view.TableViewModel;
import org.unidal.cat.plugin.transactions.filter.TransactionsHelper;
import org.unidal.cat.plugin.transactions.model.entity.TransactionsName;
import org.unidal.cat.plugin.transactions.model.entity.TransactionsReport;
import org.unidal.cat.plugin.transactions.model.entity.TransactionsType;
import org.unidal.cat.plugin.transactions.model.transform.BaseVisitor;
import org.unidal.cat.plugin.transactions.report.view.NameViewModel.NameEntry;
import org.unidal.helper.Splitters;

/**
 * Table view model for the per-name breakdown of a transactions report.
 *
 * <p>Construction harvests all {@link TransactionsName} nodes matching the
 * given type and query from the report, merges duplicates by id, prepends a
 * synthetic "SUMMARY" row, optionally sorts by the requested column, and
 * builds a pie chart over the non-summary totals.
 */
public class NameViewModel implements TableViewModel<NameEntry> {
   private String m_bu;

   private String m_type;

   private String m_query;

   private String m_sortBy;

   private List<NameEntry> m_entries = new ArrayList<NameEntry>();

   private PieChart m_chart = new PieChart();

   /**
    * @param report report to harvest from
    * @param bu     business unit, kept for the view
    * @param type   transaction type filter; null matches all types
    * @param query  free-form query (see {@link QueryFilter}); null/empty matches all names
    * @param sortBy column id to sort by; null/empty keeps harvest order
    */
   public NameViewModel(TransactionsReport report, String bu, String type, String query, String sortBy) {
      m_bu = bu;
      m_type = type;
      m_query = query;
      m_sortBy = sortBy;

      NameHarvester harvester = new NameHarvester();

      report.accept(harvester);
      harvester.harvest(m_entries);

      // sort them (the SUMMARY row is kept on top by the comparator)
      if (m_sortBy != null && m_sortBy.length() > 0) {
         NameComparator comparator = new NameComparator(m_sortBy);

         Collections.sort(m_entries, comparator);
      }

      // pie chart over all non-summary entries
      for (NameEntry entry : m_entries) {
         if (!entry.isSummary()) {
            m_chart.addItem(entry.getId(), entry.getTotal());
         }
      }

      m_chart.prepare();
   }

   public PieChart getPieChart() {
      return m_chart;
   }

   /**
    * @return number of data rows, excluding the synthetic SUMMARY row
    */
   @Override
   public int getCount() {
      return m_entries.size() - 1;
   }

   public String getBu() {
      return m_bu;
   }

   public String getQuery() {
      return m_query;
   }

   @Override
   public List<NameEntry> getRows() {
      return m_entries;
   }

   public String getSortedBy() {
      return m_sortBy;
   }

   /**
    * @return sum of total counts over all non-summary entries
    */
   public long getTotal() {
      long total = 0;

      for (NameEntry entry : m_entries) {
         if (!entry.isSummary()) {
            total += entry.getTotal();
         }
      }

      return total;
   }

   public String getType() {
      return m_type;
   }

   /**
    * Orders entries by the column named at construction time, descending for
    * numeric columns, ascending for id. The SUMMARY row always sorts first.
    */
   private static class NameComparator implements Comparator<NameEntry> {
      private String m_sortBy;

      public NameComparator(String sortBy) {
         m_sortBy = sortBy;
      }

      @Override
      public int compare(NameEntry e1, NameEntry e2) {
         // keep summary on top
         if (e2.isSummary()) {
            return 1;
         } else if (e1.isSummary()) {
            return -1;
         }

         if (m_sortBy.equals("id")) {
            return e1.getId().compareTo(e2.getId());
         } else if (m_sortBy.equals("total")) {
            // Long.compare avoids the int overflow of casting (long - long) to int
            return Long.compare(e2.getTotal(), e1.getTotal());
         } else if (m_sortBy.equals("failure")) {
            return Long.compare(e2.getFailure(), e1.getFailure());
         } else if (m_sortBy.equals("failurePercent")) {
            return Double.compare(e2.getFailurePercent(), e1.getFailurePercent());
         } else if (m_sortBy.equals("avg")) {
            return Double.compare(e2.getAvg(), e1.getAvg());
         } else if (m_sortBy.equals("95line")) {
            return Double.compare(e2.getLine95(), e1.getLine95());
         } else if (m_sortBy.equals("99line")) {
            return Double.compare(e2.getLine99(), e1.getLine99());
         } else if (m_sortBy.equals("min")) {
            return Double.compare(e2.getMin(), e1.getMin());
         } else if (m_sortBy.equals("max")) {
            return Double.compare(e2.getMax(), e1.getMax());
         } else if (m_sortBy.equals("std")) {
            return Double.compare(e2.getStd(), e1.getStd());
         } else {
            return 0; // unknown column: no comparison
         }
      }
   }

   /**
    * A single table row wrapping a {@link TransactionsName}; the SUMMARY row
    * is flagged via {@code m_summary}.
    */
   public static class NameEntry {
      private TransactionsName m_name;

      private boolean m_summary;

      public NameEntry(TransactionsName name) {
         m_name = name;
      }

      public NameEntry(TransactionsName name, boolean summary) {
         m_name = name;
         m_summary = summary;
      }

      public double getAvg() {
         return m_name.getAvg();
      }

      public long getFailure() {
         return m_name.getFailCount();
      }

      /**
       * @return failure ratio in [0, 1] (despite the "percent" name, kept for
       *         compatibility); 0 when there are no transactions
       */
      public double getFailurePercent() {
         long total = m_name.getTotalCount();

         if (total > 0) {
            return m_name.getFailCount() * 1.0 / total;
         } else {
            return 0;
         }
      }

      public String getId() {
         return m_name.getId();
      }

      public double getLine95() {
         return m_name.getLine95Value();
      }

      public double getLine99() {
         return m_name.getLine99Value();
      }

      public double getMax() {
         return m_name.getMax();
      }

      public double getMin() {
         return m_name.getMin();
      }

      /**
       * @return a failure sample message url when available, otherwise the
       *         success sample; may be null when neither is set
       */
      public String getSampleMessageId() {
         String failure = m_name.getFailMessageUrl();
         String success = m_name.getSuccessMessageUrl();

         if (failure != null) {
            return failure;
         } else {
            return success;
         }
      }

      public double getStd() {
         double std = std(m_name.getTotalCount(), m_name.getAvg(), m_name.getSum2(), m_name.getMax());

         return std;
      }

      public long getTotal() {
         return m_name.getTotalCount();
      }

      public double getTps() {
         return m_name.getTps();
      }

      public boolean isSummary() {
         return m_summary;
      }

      /**
       * Standard deviation from count/avg/sum-of-squares; special-cased for
       * tiny samples (0 for count <= 1, max - avg for count == 2).
       */
      private double std(long count, double avg, double sum2, double max) {
         double value = sum2 / count - avg * avg;

         if (value <= 0 || count <= 1) {
            return 0;
         } else if (count == 2) {
            return max - avg;
         } else {
            return Math.sqrt(value);
         }
      }
   }

   /**
    * Visitor that collects names matching the type and query filters, grouped
    * by id so that duplicates across the report can be merged.
    */
   private class NameHarvester extends BaseVisitor {
      private Map<String, List<TransactionsName>> m_map = new HashMap<String, List<TransactionsName>>();

      private QueryFilter m_filter = new QueryFilter(m_query);

      /**
       * Merges the collected names per id, builds the SUMMARY aggregate over
       * all of them, and appends the result rows with SUMMARY first.
       */
      public void harvest(List<NameEntry> names) {
         TransactionsHelper helper = new TransactionsHelper();
         TransactionsName summary = new TransactionsName("SUMMARY");

         for (Map.Entry<String, List<TransactionsName>> e : m_map.entrySet()) {
            List<TransactionsName> list = e.getValue();
            TransactionsName name;

            if (list.size() == 1) {
               name = list.get(0);
            } else {
               // multiple occurrences of the same id: merge them into one node
               name = new TransactionsName(e.getKey());

               for (TransactionsName item : list) {
                  helper.mergeName(name, item);
               }
            }

            helper.mergeName(summary, name);
            names.add(new NameEntry(name));
         }

         names.add(0, new NameEntry(summary, true));
      }

      @Override
      public void visitName(TransactionsName name) {
         if (!m_filter.apply(name)) {
            return;
         }

         String id = name.getId();
         List<TransactionsName> list = m_map.get(id);

         if (list == null) {
            list = new ArrayList<TransactionsName>();
            m_map.put(id, list);
         }

         list.add(name);
      }

      @Override
      public void visitType(TransactionsType type) {
         // only descend into the selected type (or all types when none given)
         if (m_type == null || m_type.equals(type.getId())) {
            super.visitType(type);
         }
      }
   }

   /**
    * Conjunction of space-separated {@link QueryRule}s; an empty query
    * matches everything.
    */
   private static class QueryFilter {
      private List<QueryRule> m_rules = new ArrayList<QueryRule>();

      public QueryFilter(String query) {
         if (query != null && query.length() > 0) {
            List<String> parts = Splitters.by(' ').trim().noEmptyItem().split(query);

            for (String part : parts) {
               m_rules.add(new QueryRule(part));
            }
         }
      }

      public boolean apply(TransactionsName name) {
         for (QueryRule rule : m_rules) {
            if (!rule.apply(name)) {
               return false;
            }
         }

         return true;
      }
   }

   /**
    * One query term of the form {@code [field:][=|~]arg1|arg2|...} where
    * {@code =} means exact match and {@code ~} (the default) means substring
    * match; the alternatives separated by {@code |} are OR-ed.
    */
   private static class QueryRule {
      private String m_field;

      private char m_op = '~';

      private List<String> m_args;

      public QueryRule(String part) {
         int pos = part.indexOf(':');

         if (pos > 0) {
            m_field = part.substring(0, pos);
            part = part.substring(pos + 1);
         }

         if (part.startsWith("=")) {
            m_op = '='; // EQ
            m_args = Splitters.by('|').trim().noEmptyItem().split(part.substring(1));
         } else if (part.startsWith("~")) {
            m_op = '~'; // LIKE
            m_args = Splitters.by('|').trim().noEmptyItem().split(part.substring(1));
         } else {
            m_op = '~'; // LIKE
            m_args = Splitters.by('|').trim().noEmptyItem().split(part);
         }
      }

      public boolean apply(TransactionsName name) {
         if (m_op == '~') {
            if (m_field == null || "name".equals(m_field)) {
               for (String part : m_args) {
                  if (name.getId().contains(part)) {
                     return true;
                  }
               }

               return false;
            }
         } else if (m_op == '=') {
            if (m_field == null || "name".equals(m_field)) {
               for (String part : m_args) {
                  if (name.getId().equals(part)) {
                     return true;
                  }
               }

               return false;
            }
         }

         return false;
      }
   }
}
apache-2.0
JP1998/MessagingApp
java/de/jeanpierrehotz/messaging/network/Connected.java
1746
/* * Copyright 2016 Jeremy Schiemann, Jean-Pierre Hotz * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.jeanpierrehotz.messaging.network; /** * Dieses Interface kann implementiert werden, damit die konstanten byte-Codes global konsisitent sind. */ public interface Connected{ /** * Die Konstante, die anzeigt, dass die Verbindung geschlossen wird */ byte BYTECODE_CLOSECONNECTION = -1; /** * Die Konstante, welche eine Nachricht von einem User einleitet */ byte BYTECODE_MESSAGE = 1; /** * Die Konstante, die eine Servernachricht einleitet */ byte BYTECODE_SERVERMESSAGE = 2; /** * Die Konstante, die die Veränderung des Benutzernamens einleitet */ byte BYTECODE_CHANGENAME = 3; /** * Die Konstante, die den Server anpingt (Antwort ist die selbe) */ byte BYTECODE_SERVERPING = 4; /** * Die Konstante, die genutzt wird, um eine Liste der Namen der verbundenen Nutzer zu versenden */ byte BYTECODE_NAMES = 5; /** * Die Konstante, die genutzt wird, um die Anzahl an verbundenen Nutzern zu versenden */ byte BYTECODE_NAMESCOUNT = 6; }
apache-2.0
evant/auto-parcel-gson
auto-parcel-gson-processor/src/test/java/auto/parcelgson/processor/CompilationTest.java
53862
/* * Copyright (C) 2014 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package auto.parcelgson.processor; import static com.google.common.truth.Truth.assertAbout; import static com.google.testing.compile.JavaSourceSubjectFactory.javaSource; import static com.google.testing.compile.JavaSourcesSubjectFactory.javaSources; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.testing.compile.JavaFileObjects; import junit.framework.TestCase; import java.io.IOException; import java.io.PrintWriter; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.util.Set; import javax.annotation.processing.AbstractProcessor; import javax.annotation.processing.ProcessingEnvironment; import javax.annotation.processing.RoundEnvironment; import javax.lang.model.SourceVersion; import javax.lang.model.element.Element; import javax.lang.model.element.TypeElement; import javax.lang.model.util.ElementFilter; import javax.tools.JavaFileObject; /** * @author emcmanus@google.com (Éamonn McManus) */ public class CompilationTest extends TestCase { public void testCompilation() { // Positive test case that ensures we generate the expected code for at least one case. 
// Most AutoParcel code-generation tests are functional, meaning that we check that the generated // code does the right thing rather than checking what it looks like, but this test is a sanity // check that we are not generating correct but weird code. JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Baz {", " public abstract int buh();", "", " public static Baz create(int buh) {", " return new AutoParcelGson_Baz(buh);", " }", "}"); JavaFileObject expectedOutput = JavaFileObjects.forSourceLines( "foo.bar.AutoParcelGson_Baz", "package foo.bar;", "", "final class AutoParcelGson_Baz extends Baz {", " private final int buh;", "", " AutoParcelGson_Baz(int buh) {", " this.buh = buh;", " }", "", " @Override public int buh() {", " return buh;", " }", "", " @Override public String toString() {", " return \"Baz{\"", " + \"buh=\" + buh", " + \"}\";", " }", "", " @Override public boolean equals(Object o) {", " if (o == this) {", " return true;", " }", " if (o instanceof Baz) {", " Baz that = (Baz) o;", " return (this.buh == that.buh());", " }", " return false;", " }", "", " @Override public int hashCode() {", " int h = 1;", " h *= 1000003;", " h ^= this.buh;", " return h;", " }", "}" ); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor()) .compilesWithoutError() .and().generatesSources(expectedOutput); } public void testImports() { // Test that referring to the same class in two different ways does not confuse the import logic // into thinking it is two different classes and that therefore it can't import. 
The code here // is nonsensical but successfully reproduces a real problem, which is that a TypeMirror that is // extracted using Elements.getTypeElement(name).asType() does not compare equal to one that is // extracted from ExecutableElement.getReturnType(), even though Types.isSameType considers them // equal. So unless we are careful, the java.util.Arrays that we import explicitly to use its // methods will appear different from the java.util.Arrays that is the return type of the // arrays() method here. JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "import java.util.Arrays;", "", "@AutoParcelGson", "public abstract class Baz {", " public abstract int[] ints();", " public abstract Arrays arrays();", "", " public static Baz create(int[] ints, Arrays arrays) {", " return new AutoParcelGson_Baz(ints, arrays);", " }", "}"); JavaFileObject expectedOutput = JavaFileObjects.forSourceLines( "foo.bar.AutoParcelGson_Baz", "package foo.bar;", "", "import java.util.Arrays;", "", "final class AutoParcelGson_Baz extends Baz {", " private final int[] ints;", " private final Arrays arrays;", "", " AutoParcelGson_Baz(int[] ints, Arrays arrays) {", " if (ints == null) {", " throw new NullPointerException(\"Null ints\");", " }", " this.ints = ints;", " if (arrays == null) {", " throw new NullPointerException(\"Null arrays\");", " }", " this.arrays = arrays;", " }", "", " @Override public int[] ints() {", " return ints.clone();", " }", "", " @Override public Arrays arrays() {", " return arrays;", " }", "", " @Override public String toString() {", " return \"Baz{\"", " + \"ints=\" + Arrays.toString(ints) + \", \"", " + \"arrays=\" + arrays", " + \"}\";", " }", "", " @Override public boolean equals(Object o) {", " if (o == this) {", " return true;", " }", " if (o instanceof Baz) {", " Baz that = (Baz) o;", " return (Arrays.equals(this.ints, (that instanceof AutoParcelGson_Baz) " + 
"? ((AutoParcelGson_Baz) that).ints : that.ints()))", " && (this.arrays.equals(that.arrays()));", " }", " return false;", " }", "", " @Override public int hashCode() {", " int h = 1;", " h *= 1000003;", " h ^= Arrays.hashCode(this.ints);", " h *= 1000003;", " h ^= this.arrays.hashCode();", " return h;", " }", "}" ); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor()) .compilesWithoutError() .and().generatesSources(expectedOutput); } public void testNoMultidimensionalPrimitiveArrays() throws Exception { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Baz {", " public abstract int[][] ints();", "", " public static Baz create(int[][] ints) {", " return new AutoParcelGson_Baz(ints);", " }", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor()) .failsToCompile() .withErrorContaining("AutoParcelGson class cannot define an array-valued property " + "unless it is a primitive array") .in(javaFileObject).onLine(7); } public void testNoObjectArrays() throws Exception { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Baz {", " public abstract String[] strings();", "", " public static Baz create(String[] strings) {", " return new AutoParcelGson_Baz(strings);", " }", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor()) .failsToCompile() .withErrorContaining("AutoParcelGson class cannot define an array-valued property " + "unless it is a primitive array") .in(javaFileObject).onLine(7); } public void testAnnotationOnInterface() throws Exception { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import 
auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public interface Baz {}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor()) .failsToCompile() .withErrorContaining("AutoParcelGson only applies to classes") .in(javaFileObject).onLine(6); } public void testAnnotationOnEnum() throws Exception { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public enum Baz {}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor()) .failsToCompile() .withErrorContaining("AutoParcelGson only applies to classes") .in(javaFileObject).onLine(6); } public void testExtendAutoParcel() throws Exception { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Outer", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "public class Outer {", " @AutoParcelGson", " static abstract class Parent {", " static Parent create(int randomProperty) {", " return new AutoParcelGson_Outer_Parent(randomProperty);", " }", "", " abstract int randomProperty();", " }", "", " @AutoParcelGson", " static abstract class Child extends Parent {", " static Child create(int randomProperty) {", " return new AutoParcelGson_Outer_Child(randomProperty);", " }", "", " abstract int randomProperty();", " }", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor()) .failsToCompile() .withErrorContaining("may not extend") .in(javaFileObject).onLine(16); } public void testBogusSerialVersionUID() throws Exception { String[] mistakes = { "final long serialVersionUID = 1234L", // not static "static long serialVersionUID = 1234L", // not final "static final Long serialVersionUID = 1234L", // not long "static final long serialVersionUID = (Long) 1234L", // not a compile-time constant }; for (String mistake : mistakes) { JavaFileObject javaFileObject = 
JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Baz implements java.io.Serializable {", " " + mistake + ";", "", " public abstract int foo();", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor()) .failsToCompile() .withErrorContaining( "serialVersionUID must be a static final long compile-time constant") .in(javaFileObject).onLine(7); } } public void testNonExistentSuperclass() throws Exception { // The main purpose of this test is to check that AutoParcelProcessor doesn't crash the // compiler in this case. JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Existent extends NonExistent {", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor()) .failsToCompile() .withErrorContaining("NonExistent") .in(javaFileObject).onLine(6); } public void testCannotImplementAnnotation() throws Exception { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.RetentionImpl", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "import java.lang.annotation.Retention;", "import java.lang.annotation.RetentionPolicy;", "", "@AutoParcelGson", "public abstract class RetentionImpl implements Retention {", " public static Retention create(RetentionPolicy policy) {", " return new AutoParcelGson_RetentionImpl(policy);", " }", "", " @Override public Class<? 
extends Retention> annotationType() {", " return Retention.class;", " }", "", " @Override public boolean equals(Object o) {", " return (o instanceof Retention && value().equals((Retention) o).value());", " }", "", " @Override public int hashCode() {", " return (\"value\".hashCode() * 127) ^ value().hashCode();", " }", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor()) .failsToCompile() .withErrorContaining("may not be used to implement an annotation interface") .in(javaFileObject).onLine(8); } public void testMissingPropertyType() throws Exception { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Baz {", " public abstract MissingType missingType();", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor()) .failsToCompile() .withErrorContaining("MissingType") .in(javaFileObject).onLine(7); } public void testMissingGenericPropertyType() throws Exception { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Baz {", " public abstract MissingType<?> missingType();", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor()) .failsToCompile() .withErrorContaining("MissingType") .in(javaFileObject).onLine(7); } public void testMissingComplexGenericPropertyType() throws Exception { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "import java.util.Map;", "import java.util.Set;", "", "@AutoParcelGson", "public abstract class Baz {", " public abstract Map<Set<?>, MissingType<?>> missingType();", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new 
AutoParcelProcessor()) .failsToCompile() .withErrorContaining("MissingType") .in(javaFileObject).onLine(10); } public void testMissingSuperclassGenericParameter() throws Exception { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Baz<T extends MissingType<?>> {", " public abstract int foo();", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor()) .failsToCompile() .withErrorContaining("MissingType") .in(javaFileObject).onLine(6); } public void testCorrectBuilder() throws Exception { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "import java.util.List;", "import javax.annotation.Nullable;", "", "@AutoParcelGson", "public abstract class Baz<T extends Number> {", " public abstract int anInt();", " public abstract byte[] aByteArray();", " @Nullable public abstract int[] aNullableIntArray();", " public abstract List<T> aList();", "", " public abstract Builder<T> toBuilder();", "", " @AutoParcelGson.Validate", " void validate() {", " if (anInt() < 0) {", " throw new IllegalStateException(\"Negative integer\");", " }", " }", "", " @AutoParcelGson.Builder", " public interface Builder<T extends Number> {", " Builder<T> anInt(int x);", " Builder<T> aByteArray(byte[] x);", " Builder<T> aNullableIntArray(@Nullable int[] x);", " Builder<T> aList(List<T> x);", " Baz<T> build();", " }", "", " public static <T extends Number> Builder<T> builder() {", " return AutoParcelGson_Baz.builder();", " }", "}"); JavaFileObject expectedOutput = JavaFileObjects.forSourceLines( "foo.bar.AutoParcelGson_Baz", "package foo.bar;", "", "import java.util.Arrays;", "import java.util.BitSet;", "import java.util.List;", "", "final class AutoParcelGson_Baz<T extends Number> extends Baz<T> {", " private final int anInt;", 
" private final byte[] aByteArray;", " private final int[] aNullableIntArray;", " private final List<T> aList;", "", " private AutoParcelGson_Baz(" + "int anInt, byte[] aByteArray, int[] aNullableIntArray, List<T> aList) {", " this.anInt = anInt;", " if (aByteArray == null) {", " throw new NullPointerException(\"Null aByteArray\");", " }", " this.aByteArray = aByteArray;", " this.aNullableIntArray = aNullableIntArray;", " if (aList == null) {", " throw new NullPointerException(\"Null aList\");", " }", " this.aList = aList;", " }", "", " @Override public int anInt() {", " return anInt;", " }", "", " @Override public byte[] aByteArray() {", " return aByteArray.clone();", " }", "", " @javax.annotation.Nullable", " @Override public int[] aNullableIntArray() {", " return aNullableIntArray == null ? null : aNullableIntArray.clone();", " }", "", " @Override public List<T> aList() {", " return aList;", " }", "", " @Override public String toString() {", " return \"Baz{\"", " + \"anInt=\" + anInt + \", \"", " + \"aByteArray=\" + Arrays.toString(aByteArray) + \", \"", " + \"aNullableIntArray=\" + Arrays.toString(aNullableIntArray) + \", \"", " + \"aList=\" + aList", " + \"}\";", " }", "", " @Override public boolean equals(Object o) {", " if (o == this) {", " return true;", " }", " if (o instanceof Baz) {", " Baz<?> that = (Baz<?>) o;", " return (this.anInt == that.anInt())", " && (Arrays.equals(this.aByteArray, " + "(that instanceof AutoParcelGson_Baz) " + "? ((AutoParcelGson_Baz) that).aByteArray : that.aByteArray()))", " && (Arrays.equals(this.aNullableIntArray, " + "(that instanceof AutoParcelGson_Baz) " + "? 
((AutoParcelGson_Baz) that).aNullableIntArray : that.aNullableIntArray()))", " && (this.aList.equals(that.aList()));", " }", " return false;", " }", "", " @Override public int hashCode() {", " int h = 1;", " h *= 1000003;", " h ^= this.anInt;", " h *= 1000003;", " h ^= Arrays.hashCode(this.aByteArray);", " h *= 1000003;", " h ^= Arrays.hashCode(this.aNullableIntArray);", " h *= 1000003;", " h ^= this.aList.hashCode();", " return h;", " }", "", " @Override public Baz.Builder<T> toBuilder() {", " return new Builder<T>(this);", " }", "", " static final class Builder<T extends Number> implements Baz.Builder<T> {", " private final BitSet set$ = new BitSet();", "", " private int anInt;", " private byte[] aByteArray;", " private int[] aNullableIntArray;", " private List<T> aList;", "", " Builder() {", " }", "", " Builder(Baz<T> source) {", " anInt(source.anInt());", " aByteArray(source.aByteArray());", " aNullableIntArray(source.aNullableIntArray());", " aList(source.aList());", " }", "", " @Override", " public Baz.Builder<T> anInt(int anInt) {", " this.anInt = anInt;", " set$.set(0);", " return this;", " }", "", " @Override", " public Baz.Builder<T> aByteArray(byte[] aByteArray) {", " this.aByteArray = aByteArray.clone();", " set$.set(1);", " return this;", " }", "", " @Override", " public Baz.Builder<T> aNullableIntArray(int[] aNullableIntArray) {", " this.aNullableIntArray = " + "(aNullableIntArray == null) ? 
null : aNullableIntArray.clone();", " return this;", " }", "", " @Override", " public Baz.Builder<T> aList(List<T> aList) {", " this.aList = aList;", " set$.set(2);", " return this;", " }", "", " @Override", " public Baz<T> build() {", " if (set$.cardinality() < 3) {", " String[] propertyNames = {", " \"anInt\", \"aByteArray\", \"aList\",", " };", " StringBuilder missing = new StringBuilder();", " for (int i = 0; i < 3; i++) {", " if (!set$.get(i)) {", " missing.append(' ').append(propertyNames[i]);", " }", " }", " throw new IllegalStateException(\"Missing required properties:\" + missing);", " }", " Baz<T> result = new AutoParcelGson_Baz<T>(", " this.anInt, this.aByteArray, this.aNullableIntArray, this.aList);", " result.validate();", " return result;", " }", " }", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor()) .compilesWithoutError() .and() .generatesSources(expectedOutput); } public void testAutoParcelBuilderOnTopLevelClass() throws Exception { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Builder", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson.Builder", "public interface Builder {", " Builder foo(int x);", " Object build();", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor(), new AutoParcelBuilderProcessor()) .failsToCompile() .withErrorContaining("can only be applied to a class or interface inside") .in(javaFileObject).onLine(6); } public void testAutoParcelBuilderNotInsideAutoParcel() throws Exception { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "public abstract class Baz {", " abstract int foo();", "", " static Builder builder() {", " return new AutoParcelGson_Baz.Builder();", " }", "", " @AutoParcelGson.Builder", " public interface Builder {", " Builder foo(int x);", " Baz build();", " }", 
"}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor(), new AutoParcelBuilderProcessor()) .failsToCompile() .withErrorContaining("can only be applied to a class or interface inside") .in(javaFileObject).onLine(13); } public void testAutoParcelBuilderOnEnum() throws Exception { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Baz {", " abstract int foo();", "", " static Builder builder() {", " return null;", " }", "", " @AutoParcelGson.Builder", " public enum Builder {}", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor(), new AutoParcelBuilderProcessor()) .failsToCompile() .withErrorContaining("can only apply to a class or an interface") .in(javaFileObject).onLine(14); } public void testAutoParcelBuilderDuplicate() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Baz {", " @AutoParcelGson.Builder", " public interface Builder1 {", " Baz build();", " }", "", " @AutoParcelGson.Builder", " public interface Builder2 {", " Baz build();", " }", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor(), new AutoParcelBuilderProcessor()) .failsToCompile() .withErrorContaining("already has a Builder: foo.bar.Baz.Builder1") .in(javaFileObject).onLine(13); } public void testAutoParcelBuilderMissingSetter() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Baz {", " abstract int blim();", " abstract String blam();", "", " @AutoParcelGson.Builder", " public interface Builder {", " Builder blam(String x);", " Baz build();", " }", "}"); 
assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor(), new AutoParcelBuilderProcessor()) .failsToCompile() .withErrorContaining("with this signature: foo.bar.Baz.Builder blim(int)") .in(javaFileObject).onLine(11); } public void testAutoParcelBuilderMissingSetterUsingSetPrefix() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Baz {", " abstract int blim();", " abstract String blam();", "", " @AutoParcelGson.Builder", " public interface Builder {", " Builder setBlam(String x);", " Baz build();", " }", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor(), new AutoParcelBuilderProcessor()) .failsToCompile() .withErrorContaining("with this signature: foo.bar.Baz.Builder setBlim(int)") .in(javaFileObject).onLine(11); } public void testAutoParcelBuilderWrongTypeSetter() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Baz {", " abstract int blim();", " abstract String blam();", "", " @AutoParcelGson.Builder", " public interface Builder {", " Builder blim(String x);", " Builder blam(String x);", " Baz build();", " }", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor(), new AutoParcelBuilderProcessor()) .failsToCompile() .withErrorContaining("Parameter type should be int") .in(javaFileObject).onLine(12); } public void testAutoParcelBuilderWrongTypeSetterWithGetPrefix() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Baz {", " abstract int getBlim();", " abstract String getBlam();", "", " @AutoParcelGson.Builder", " public 
interface Builder {", " Builder blim(String x);", " Builder blam(String x);", " Baz build();", " }", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor(), new AutoParcelBuilderProcessor()) .failsToCompile() .withErrorContaining("Parameter type should be int") .in(javaFileObject).onLine(12); } public void testAutoParcelBuilderExtraSetter() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Baz {", " abstract String blam();", "", " @AutoParcelGson.Builder", " public interface Builder {", " Builder blim(int x);", " Builder blam(String x);", " Baz build();", " }", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor(), new AutoParcelBuilderProcessor()) .failsToCompile() .withErrorContaining("Method does not correspond to a property of foo.bar.Baz") .in(javaFileObject).onLine(11); } public void testAutoParcelBuilderSetPrefixAndNoSetPrefix() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Baz {", " abstract int blim();", " abstract String blam();", "", " @AutoParcelGson.Builder", " public interface Builder {", " Builder blim(int x);", " Builder setBlam(String x);", " Baz build();", " }", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor(), new AutoParcelBuilderProcessor()) .failsToCompile() .withErrorContaining("If any setter methods use the setFoo convention then all must") .in(javaFileObject).onLine(12); } public void testAutoParcelBuilderAlienMethod() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Baz {", " abstract 
String blam();", "", " @AutoParcelGson.Builder", " public interface Builder {", " Builder blam(String x, String y);", " Baz build();", " }", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor(), new AutoParcelBuilderProcessor()) .failsToCompile() .withErrorContaining( "Builder methods must either have no arguments and return foo.bar.Baz or have one" + " argument and return foo.bar.Baz.Builder") .in(javaFileObject).onLine(11); } public void testAutoParcelBuilderMissingBuildMethod() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Baz<T> {", " abstract T blam();", "", " @AutoParcelGson.Builder", " public interface Builder<T> {", " Builder<T> blam(T x);", " }", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor(), new AutoParcelBuilderProcessor()) .failsToCompile() .withErrorContaining( "Builder must have a single no-argument method returning foo.bar.Baz<T>") .in(javaFileObject).onLine(10); } public void testAutoParcelBuilderDuplicateBuildMethods() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Baz {", " abstract String blam();", "", " @AutoParcelGson.Builder", " public interface Builder {", " Builder blam(String x);", " Baz build();", " Baz create();", " }", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor(), new AutoParcelBuilderProcessor()) .failsToCompile() .withErrorContaining("Builder must have a single no-argument method returning foo.bar.Baz") .in(javaFileObject).onLine(12) .and() .withErrorContaining("Builder must have a single no-argument method returning foo.bar.Baz") .in(javaFileObject).onLine(13); } public void 
testAutoParcelBuilderWrongTypeBuildMethod() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Baz {", " abstract String blam();", "", " @AutoParcelGson.Builder", " public interface Builder {", " Builder blam(String x);", " String build();", " }", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor(), new AutoParcelBuilderProcessor()) .failsToCompile() .withErrorContaining("Builder must have a single no-argument method returning foo.bar.Baz") .in(javaFileObject).onLine(10); } public void testAutoParcelBuilderTypeParametersDontMatch1() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Baz<T> {", " abstract String blam();", "", " @AutoParcelGson.Builder", " public interface Builder {", " Builder blam(String x);", " Baz build();", " }", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor(), new AutoParcelBuilderProcessor()) .failsToCompile() .withErrorContaining("Type parameters of foo.bar.Baz.Builder must have same names and " + "bounds as type parameters of foo.bar.Baz") .in(javaFileObject).onLine(10); } public void testAutoParcelBuilderTypeParametersDontMatch2() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Baz<T> {", " abstract T blam();", "", " @AutoParcelGson.Builder", " public interface Builder<E> {", " Builder<E> blam(E x);", " Baz build();", " }", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor(), new AutoParcelBuilderProcessor()) .failsToCompile() .withErrorContaining("Type parameters of foo.bar.Baz.Builder 
must have same names and " + "bounds as type parameters of foo.bar.Baz") .in(javaFileObject).onLine(10); } public void testAutoParcelBuilderTypeParametersDontMatch3() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Baz<T extends Number & Comparable<T>> {", " abstract T blam();", "", " @AutoParcelGson.Builder", " public interface Builder<T extends Number> {", " Builder<T> blam(T x);", " Baz build();", " }", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor(), new AutoParcelBuilderProcessor()) .failsToCompile() .withErrorContaining("Type parameters of foo.bar.Baz.Builder must have same names and " + "bounds as type parameters of foo.bar.Baz") .in(javaFileObject).onLine(10); } public void testAutoParcelBuilderToBuilderWrongTypeParameters() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "abstract class Baz<K extends Comparable<K>, V> {", " abstract K key();", " abstract V value();", " abstract Builder<V, K> toBuilder1();", "", " @AutoParcelGson.Builder", " interface Builder<K extends Comparable<K>, V> {", " Builder<K, V> key(K key);", " Builder<K, V> value(V value);", " Baz<K, V> build();", " }", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor(), new AutoParcelBuilderProcessor()) .failsToCompile() .withErrorContaining("Builder converter method should return foo.bar.Baz.Builder<K, V>") .in(javaFileObject).onLine(9); } public void testAutoParcelBuilderToBuilderDuplicate() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "abstract class Baz<K extends Comparable<K>, V> {", " abstract K key();", " abstract 
V value();", " abstract Builder<K, V> toBuilder1();", " abstract Builder<K, V> toBuilder2();", "", " @AutoParcelGson.Builder", " interface Builder<K extends Comparable<K>, V> {", " Builder<K, V> key(K key);", " Builder<K, V> value(V value);", " Baz<K, V> build();", " }", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor(), new AutoParcelBuilderProcessor()) .failsToCompile() .withErrorContaining("There can be at most one builder converter method") .in(javaFileObject).onLine(9); } public void testAutoParcelValidateNotInAutoParcel() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "public abstract class Baz {", " abstract String blam();", "", " @AutoParcelGson.Validate", " void validate() {}", "", " public interface Builder {", " Builder blam(String x);", " Baz build();", " }", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor(), new AutoParcelBuilderProcessor()) .failsToCompile() .withErrorContaining( "@AutoParcelGson.Validate can only be applied to a method inside an @AutoParcelGson class") .in(javaFileObject).onLine(9); } public void testAutoParcelValidateWithoutBuilder() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Baz {", " abstract String blam();", "", " @AutoParcelGson.Validate", " void validate() {}", "", " public interface Builder {", " Builder blam(String x);", " Baz build();", " }", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor(), new AutoParcelBuilderProcessor()) .failsToCompile() .withErrorContaining( "@AutoParcelGson.Validate is only meaningful if there is an @AutoParcelGson.Builder") .in(javaFileObject).onLine(10); } public void testAutoParcelBuilderValidateMethodStatic() { 
JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Baz {", " abstract String blam();", "", " @AutoParcelGson.Validate", " static void validate() {}", "", " @AutoParcelGson.Builder", " public interface Builder {", " Builder blam(String x);", " Baz build();", " }", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor(), new AutoParcelBuilderProcessor()) .failsToCompile() .withErrorContaining("@AutoParcelGson.Validate cannot apply to a static method") .in(javaFileObject).onLine(10); } public void testAutoParcelBuilderValidateMethodNotVoid() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Baz {", " abstract String blam();", "", " @AutoParcelGson.Validate", " Baz validate() {", " return this;", " }", "", " @AutoParcelGson.Builder", " public interface Builder {", " Builder blam(String x);", " Baz build();", " }", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor(), new AutoParcelBuilderProcessor()) .failsToCompile() .withErrorContaining("@AutoParcelGson.Validate method must be void") .in(javaFileObject).onLine(10); } public void testAutoParcelBuilderValidateMethodWithParameters() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Baz {", " abstract String blam();", "", " @AutoParcelGson.Validate", " void validate(boolean why) {}", "", " @AutoParcelGson.Builder", " public interface Builder {", " Builder blam(String x);", " Baz build();", " }", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor(), new AutoParcelBuilderProcessor()) 
.failsToCompile() .withErrorContaining("@AutoParcelGson.Validate method must not have parameters") .in(javaFileObject).onLine(10); } public void testAutoParcelBuilderValidateMethodDuplicate() { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Baz {", " abstract String blam();", "", " @AutoParcelGson.Validate", " void validate() {}", "", " @AutoParcelGson.Validate", " void validateSomeMore() {}", "", " @AutoParcelGson.Builder", " public interface Builder {", " Builder blam(String x);", " Baz build();", " }", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor(), new AutoParcelBuilderProcessor()) .failsToCompile() .withErrorContaining("There can only be one @AutoParcelGson.Validate method") .in(javaFileObject).onLine(13); } public void testGetFooIsFoo() throws Exception { JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Baz {", " abstract int getFoo();", " abstract boolean isFoo();", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new AutoParcelProcessor()) .failsToCompile() .withErrorContaining("More than one @AutoParcelGson property called foo") .in(javaFileObject).onLine(8); } private static class PoisonedAutoParcelProcessor extends AutoParcelProcessor { private final IllegalArgumentException filerException; PoisonedAutoParcelProcessor(IllegalArgumentException filerException) { this.filerException = filerException; } private class ErrorInvocationHandler implements InvocationHandler { private final ProcessingEnvironment originalProcessingEnv; ErrorInvocationHandler(ProcessingEnvironment originalProcessingEnv) { this.originalProcessingEnv = originalProcessingEnv; } @Override public Object invoke(Object proxy, Method method, 
Object[] args) throws Throwable { if (method.getName().equals("getFiler")) { throw filerException; } else { return method.invoke(originalProcessingEnv, args); } } }; @Override public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) { ProcessingEnvironment poisonedProcessingEnv = (ProcessingEnvironment) Proxy.newProxyInstance( getClass().getClassLoader(), new Class<?>[] {ProcessingEnvironment.class}, new ErrorInvocationHandler(processingEnv)); processingEnv = poisonedProcessingEnv; return super.process(annotations, roundEnv); } } public void testExceptionBecomesError() throws Exception { // Ensure that if the annotation processor code gets an unexpected exception, it is converted // into a compiler error rather than being propagated. Otherwise the output can be very // confusing to the user who stumbles into a bug that causes an exception, whether in // AutoParcelProcessor or javac. // We inject an exception by subclassing AutoParcelProcessor in order to poison its processingEnv // in a way that will cause an exception the first time it tries to get the Filer. IllegalArgumentException exception = new IllegalArgumentException("I don't understand the question, and I won't respond to it"); JavaFileObject javaFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Baz {", " public abstract int foo();", "}"); assertAbout(javaSource()) .that(javaFileObject) .processedWith(new PoisonedAutoParcelProcessor(exception)) .failsToCompile() .withErrorContaining(exception.toString()) .in(javaFileObject).onLine(6); } @Retention(RetentionPolicy.SOURCE) public @interface Foo {} /* Processor that generates an empty class BarFoo every time it sees a class Bar annotated with * @Foo. 
*/ public static class FooProcessor extends AbstractProcessor { @Override public Set<String> getSupportedAnnotationTypes() { return ImmutableSet.of(Foo.class.getCanonicalName()); } @Override public SourceVersion getSupportedSourceVersion() { return SourceVersion.latestSupported(); } @Override public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) { Set<? extends Element> elements = roundEnv.getElementsAnnotatedWith(Foo.class); for (TypeElement type : ElementFilter.typesIn(elements)) { try { generateFoo(type); } catch (IOException e) { throw new AssertionError(e); } } return false; } private void generateFoo(TypeElement type) throws IOException { String pkg = TypeSimplifier.packageNameOf(type); String className = type.getSimpleName().toString(); String generatedClassName = className + "Foo"; JavaFileObject source = processingEnv.getFiler().createSourceFile(pkg + "." + generatedClassName, type); PrintWriter writer = new PrintWriter(source.openWriter()); writer.println("package " + pkg + ";"); writer.println("public class " + generatedClassName + " {}"); writer.close(); } } public void testReferencingGeneratedClass() { // Test that ensures that a type that does not exist can be the type of an @AutoParcelGson property // as long as it later does come into existence. The BarFoo type referenced here does not exist // when the AutoParcelProcessor runs on the first round, but the FooProcessor then generates it. // That generation provokes a further round of annotation processing and AutoParcelProcessor // should succeed then. 
JavaFileObject bazFileObject = JavaFileObjects.forSourceLines( "foo.bar.Baz", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@AutoParcelGson", "public abstract class Baz {", " public abstract BarFoo barFoo();", "", " public static Baz create(BarFoo barFoo) {", " return new AutoParcelGson_Baz(barFoo);", " }", "}"); JavaFileObject barFileObject = JavaFileObjects.forSourceLines( "foo.bar.Bar", "package foo.bar;", "", "import auto.parcelgson.AutoParcelGson;", "", "@" + Foo.class.getCanonicalName(), "public abstract class Bar {", " public abstract BarFoo barFoo();", "}"); assertAbout(javaSources()) .that(ImmutableList.of(bazFileObject, barFileObject)) .processedWith(new AutoParcelProcessor(), new FooProcessor()) .compilesWithoutError(); } }
apache-2.0
gocd/gocd
config/config-api/src/main/java/com/thoughtworks/go/config/TimerConfig.java
3577
/*
 * Copyright 2022 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.thoughtworks.go.config;

import com.thoughtworks.go.domain.ConfigErrors;
import org.quartz.CronExpression;

import java.text.ParseException;
import java.util.Map;
import java.util.Objects;

/**
 * @understands Configuration of a Pipeline cron timer
 */
@ConfigTag("timer")
public class TimerConfig implements Validatable {
    public static final String TIMER_SPEC = "timerSpec";
    public static final String TIMER_ONLY_ON_CHANGES = "onlyOnChanges";

    // allowNull = true means config binding may leave this null; all readers
    // below must therefore avoid unguarded unboxing.
    @ConfigAttribute(value = "onlyOnChanges", optional = true, allowNull = true)
    private Boolean onlyOnChanges = false;
    @ConfigValue
    private String timerSpec;

    private final ConfigErrors errors = new ConfigErrors();

    public TimerConfig() {
    }

    public TimerConfig(String timerSpec, boolean onlyOnChanges) {
        this.timerSpec = timerSpec;
        this.onlyOnChanges = onlyOnChanges;
    }

    public String getTimerSpec() {
        return timerSpec;
    }

    public void setTimerSpec(String timerSpec) {
        this.timerSpec = timerSpec;
    }

    /** @return true only when the flag is explicitly set to true (null-safe). */
    public boolean shouldTriggerOnlyOnChanges() {
        return Boolean.TRUE.equals(onlyOnChanges);
    }

    // Only for Rails
    public boolean getOnlyOnChanges() {
        return Boolean.TRUE.equals(onlyOnChanges);
    }

    public void setOnlyOnChanges(boolean onlyOnChanges) {
        this.onlyOnChanges = onlyOnChanges;
    }

    /**
     * Builds a TimerConfig from a request-attribute map.
     *
     * @param attributes a {@link Map} holding {@link #TIMER_SPEC} and optionally
     *                   {@link #TIMER_ONLY_ON_CHANGES} ("1" meaning true)
     * @return the timer config, or null when no (non-empty) spec was supplied
     */
    public static TimerConfig createTimer(Object attributes) {
        Map<?, ?> timerConfigMap = (Map<?, ?>) attributes;
        String timerSpec = (String) timerConfigMap.get(TIMER_SPEC);
        // A missing key is treated the same as a blank spec: no timer configured.
        // (Previously a missing key caused a NullPointerException.)
        if (timerSpec == null || timerSpec.isEmpty()) {
            return null;
        }
        String onlyOnChanges = (String) timerConfigMap.get(TIMER_ONLY_ON_CHANGES);
        return new TimerConfig(timerSpec, "1".equals(onlyOnChanges));
    }

    public boolean validateTree(ValidationContext validationContext) {
        validate(validationContext);
        return errors().isEmpty();
    }

    @Override
    public void validate(ValidationContext validationContext) {
        if (timerSpec == null) {
            errors.add(TIMER_SPEC, "Timer Spec can not be null.");
            return;
        }
        try {
            // CronExpression's constructor performs the syntax check.
            new CronExpression(timerSpec);
        } catch (ParseException pe) {
            errors.add(TIMER_SPEC, "Invalid cron syntax: " + pe.getMessage());
        }
    }

    @Override
    public ConfigErrors errors() {
        return errors;
    }

    @Override
    public void addError(String fieldName, String message) {
        errors.add(fieldName, message);
    }

    // Equality is intentionally based on the spec alone; onlyOnChanges is
    // excluded, matching the original behavior callers may rely on.
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        TimerConfig that = (TimerConfig) o;
        return Objects.equals(timerSpec, that.timerSpec);
    }

    @Override
    public int hashCode() {
        return Objects.hashCode(timerSpec);
    }
}
apache-2.0
Fabryprog/camel
core/camel-base/src/main/java/org/apache/camel/processor/ContractAdvice.java
8765
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.processor;

import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.ValidationException;
import org.apache.camel.spi.CamelInternalProcessorAdvice;
import org.apache.camel.spi.Contract;
import org.apache.camel.spi.DataType;
import org.apache.camel.spi.DataTypeAware;
import org.apache.camel.spi.Transformer;
import org.apache.camel.spi.Validator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A {@link CamelInternalProcessorAdvice} which applies {@link Transformer} and {@link Validator}
 * according to the data type Contract.
 * <p/>
 * The default camel {@link Message} implements {@link DataTypeAware} which
 * holds a {@link DataType} to indicate current message type. If the input type
 * declared by {@link org.apache.camel.model.InputTypeDefinition} is different from current IN message type,
 * camel internal processor look for a Transformer which transforms from the current
 * message type to the expected message type before routing.
 * After routing, if the output type declared by {@link org.apache.camel.model.OutputTypeDefinition} is different
 * from current OUT message (or IN message if no OUT), camel look for a Transformer and apply.
 *
 * @see Transformer
 * @see Validator
 * @see org.apache.camel.model.InputTypeDefinition
 * @see org.apache.camel.model.OutputTypeDefinition
 */
public class ContractAdvice implements CamelInternalProcessorAdvice {
    private static final Logger LOG = LoggerFactory.getLogger(ContractAdvice.class);

    // The declared input/output types plus the validate-input/output flags.
    private Contract contract;

    public ContractAdvice(Contract contract) {
        this.contract = contract;
    }

    /**
     * Before routing: if the contract declares an input type that differs from the
     * IN message's current {@link DataType}, transform the IN message to it and
     * update the recorded type; optionally validate afterwards.
     * <p/>
     * Any failure is recorded on the exchange via {@code setException} rather than
     * propagated, so routing error handling can take over. Always returns null
     * (no state is carried to {@link #after(Exchange, Object)}).
     */
    @Override
    public Object before(Exchange exchange) throws Exception {
        // Only messages that track their own DataType can participate.
        if (!(exchange.getIn() instanceof DataTypeAware)) {
            return null;
        }
        try {
            DataType to = contract.getInputType();
            if (to != null) {
                DataTypeAware target = (DataTypeAware) exchange.getIn();
                DataType from = target.getDataType();
                if (!to.equals(from)) {
                    LOG.debug("Looking for transformer for INPUT: from='{}', to='{}'", from, to);
                    doTransform(exchange.getIn(), from, to);
                    // Record the new type so downstream advice sees the converted message.
                    target.setDataType(to);
                }
                if (contract.isValidateInput()) {
                    doValidate(exchange.getIn(), to);
                }
            }
        } catch (Exception e) {
            // Defer to the exchange's error handling instead of throwing.
            exchange.setException(e);
        }
        return null;
    }

    /**
     * After routing: mirror of {@link #before(Exchange)} for the contract's output
     * type, applied to OUT if present, otherwise IN. Skipped entirely when the
     * exchange already failed.
     */
    @Override
    public void after(Exchange exchange, Object data) throws Exception {
        if (exchange.isFailed()) {
            // TODO can we add FAULT_TYPE processing?
            return;
        }

        Message target = exchange.hasOut() ? exchange.getOut() : exchange.getIn();
        if (!(target instanceof DataTypeAware)) {
            return;
        }
        try {
            DataType to = contract.getOutputType();
            if (to != null) {
                DataTypeAware typeAwareTarget = (DataTypeAware)target;
                DataType from = typeAwareTarget.getDataType();
                if (!to.equals(from)) {
                    LOG.debug("Looking for transformer for OUTPUT: from='{}', to='{}'", from, to);
                    doTransform(target, from, to);
                    typeAwareTarget.setDataType(to);
                }
                if (contract.isValidateOutput()) {
                    doValidate(target, to);
                }
            }
        } catch (Exception e) {
            exchange.setException(e);
        }
    }

    /**
     * Transform the message body from 'from' to 'to', trying in order:
     * declared-transformer lookup, Java TypeConverter fallback (for Java 'from'
     * types), then a two-step transformer chain via java.lang.Object.
     * Throws IllegalArgumentException when nothing matches a non-Java 'from'.
     */
    private void doTransform(Message message, DataType from, DataType to) throws Exception {
        if (from == null) {
            // If 'from' is null, only Java-Java convertion is performed.
            // It means if 'to' is other than Java, it's assumed to be already in expected type.
            convertIfRequired(message, to);
            return;
        }

        // transform into 'from' type before performing declared transformation
        convertIfRequired(message, from);

        if (applyMatchedTransformer(message, from, to)) {
            // Found matched transformer. Java-Java transformer is also allowed.
            return;
        } else if (from.isJavaType()) {
            // Try TypeConverter as a fallback for Java->Java transformation
            convertIfRequired(message, to);
            // If Java->Other transformation required but no transformer matched,
            // then assume it's already in expected type, i.e. do nothing.
            return;
        } else if (applyTransformerChain(message, from, to)) {
            // Other->Other transformation - found a transformer chain
            return;
        }

        throw new IllegalArgumentException("No Transformer found for [from='" + from + "', to='" + to + "']");
    }

    /**
     * If 'type' is a named Java type and the body is not already assignable to it,
     * convert the body with Camel's mandatory-body conversion.
     *
     * @return true when a conversion was actually performed
     */
    private boolean convertIfRequired(Message message, DataType type) throws Exception {
        // TODO for better performance it may be better to add TypeConverterTransformer
        // into transformer registry automatically to avoid unnecessary scan in transformer registry
        if (type != null && type.isJavaType() && type.getName() != null) {
            CamelContext context = message.getExchange().getContext();
            Class<?> typeJava = getClazz(type.getName(), context);
            if (!typeJava.isAssignableFrom(message.getBody().getClass())) {
                LOG.debug("Converting to '{}'", typeJava.getName());
                message.setBody(message.getMandatoryBody(typeJava));
                return true;
            }
        }
        return false;
    }

    /** Apply the given transformer if non-null; returns whether it was applied. */
    private boolean applyTransformer(Transformer transformer, Message message, DataType from, DataType to) throws Exception {
        if (transformer != null) {
            LOG.debug("Applying transformer: from='{}', to='{}', transformer='{}'", from, to, transformer);
            transformer.transform(message, from, to);
            return true;
        }
        return false;
    }

    /** Look up a transformer registered for exactly (from, to) and apply it. */
    private boolean applyMatchedTransformer(Message message, DataType from, DataType to) throws Exception {
        Transformer transformer = message.getExchange().getContext().resolveTransformer(from, to);
        return applyTransformer(transformer, message, from, to);
    }

    /**
     * Two-step fallback: from-model -> java.lang.Object -> to-model. Only applied
     * when transformers exist for both models.
     */
    private boolean applyTransformerChain(Message message, DataType from, DataType to) throws Exception {
        CamelContext context = message.getExchange().getContext();
        Transformer fromTransformer = context.resolveTransformer(from.getModel());
        Transformer toTransformer = context.resolveTransformer(to.getModel());
        if (fromTransformer != null && toTransformer != null) {
            LOG.debug("Applying transformer 1/2: from='{}', to='{}', transformer='{}'", from, to, fromTransformer);
            fromTransformer.transform(message, from, new DataType(Object.class));
            LOG.debug("Applying transformer 2/2: from='{}', to='{}', transformer='{}'", from, to, toTransformer);
            toTransformer.transform(message, new DataType(Object.class), to);
            return true;
        }
        return false;
    }

    /** Resolve a class by name via the context's class resolver (throws if absent). */
    private Class<?> getClazz(String type, CamelContext context) throws Exception {
        return context.getClassResolver().resolveMandatoryClass(type);
    }

    /**
     * Validate the message against the validator registered for 'type'.
     * A missing validator is an error when validation was requested.
     */
    private void doValidate(Message message, DataType type) throws ValidationException {
        Validator validator = message.getExchange().getContext().resolveValidator(type);
        if (validator != null) {
            LOG.debug("Applying validator: type='{}', validator='{}'", type, validator);
            validator.validate(message, type);
        } else {
            throw new ValidationException(message.getExchange(), String.format("No Validator found for '%s'", type));
        }
    }
}
apache-2.0
tom1120/ninja
ninja-servlet/src/main/java/ninja/servlet/NinjaServletDispatcher.java
2455
/**
 * Copyright (C) 2012-2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package ninja.servlet;

import java.io.IOException;

import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import ninja.Context;
import ninja.Ninja;

import com.google.inject.Inject;
import com.google.inject.Injector;

/**
 * A simple servlet that allows us to run Ninja inside any servlet
 * container: each incoming request is wrapped into a Ninja context and
 * handed to the Ninja engine for routing.
 *
 * @author ra
 */
public class NinjaServletDispatcher extends HttpServlet {

    private static final long serialVersionUID = 1L;

    // In the regular deployment case both fields are injected via the
    // injector created by NinjaServletListener.
    @Inject
    private Injector injector;

    @Inject
    private Ninja ninja;

    public NinjaServletDispatcher() {
    }

    /**
     * Special constructor for usage in JUnit tests.
     * in regular case we have injector from NinjaServletListener
     */
    public NinjaServletDispatcher(Injector injector) {
        this.injector = injector;
    }

    @Override
    public void service(ServletRequest req,
                        ServletResponse resp) throws IOException, ServletException {

        // Narrow the generic servlet types to their HTTP counterparts.
        HttpServletRequest httpRequest = (HttpServletRequest) req;
        HttpServletResponse httpResponse = (HttpServletResponse) resp;

        ServletContext servletContext = getServletContext();

        // We generate a Ninja compatible context element and populate it.
        NinjaServletContext ninjaContext =
                (NinjaServletContext) injector.getProvider(Context.class).get();
        ninjaContext.init(servletContext, httpRequest, httpResponse);

        // And invoke ninja on it.
        // Ninja handles all defined routes, filters and much more.
        ninja.onRouteRequest(ninjaContext);
    }
}
apache-2.0
shyTNT/googleads-java-lib
examples/dfp_axis/src/main/java/dfp/axis/v201508/userservice/GetCurrentUser.java
2287
// Copyright 2015 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package dfp.axis.v201508.userservice; import com.google.api.ads.common.lib.auth.OfflineCredentials; import com.google.api.ads.common.lib.auth.OfflineCredentials.Api; import com.google.api.ads.dfp.axis.factory.DfpServices; import com.google.api.ads.dfp.axis.v201508.User; import com.google.api.ads.dfp.axis.v201508.UserServiceInterface; import com.google.api.ads.dfp.lib.client.DfpSession; import com.google.api.client.auth.oauth2.Credential; /** * This example gets the current user. * * Credentials and properties in {@code fromFile()} are pulled from the * "ads.properties" file. See README for more info. */ public class GetCurrentUser { public static void runExample(DfpServices dfpServices, DfpSession session) throws Exception { // Get the UserService. UserServiceInterface userService = dfpServices.get(session, UserServiceInterface.class); // Get the current user. User user = userService.getCurrentUser(); System.out.printf( "User with ID \"%d\", name \"%s\", email \"%s\", and role \"%s\" is the current user.\n", user.getId(), user.getName(), user.getEmail(), user.getRoleName()); } public static void main(String[] args) throws Exception { // Generate a refreshable OAuth2 credential. Credential oAuth2Credential = new OfflineCredentials.Builder() .forApi(Api.DFP) .fromFile() .build() .generateCredential(); // Construct a DfpSession. 
DfpSession session = new DfpSession.Builder() .fromFile() .withOAuth2Credential(oAuth2Credential) .build(); DfpServices dfpServices = new DfpServices(); runExample(dfpServices, session); } }
apache-2.0
rsocket/rsocket-java
rsocket-core/src/main/java/io/rsocket/core/StreamIdSupplier.java
1736
/* * Copyright 2015-2020 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.rsocket.core; import io.netty.util.collection.IntObjectMap; /** This API is not thread-safe and must be strictly used in serialized fashion */ final class StreamIdSupplier { private static final int MASK = 0x7FFFFFFF; private long streamId; // Visible for testing StreamIdSupplier(int streamId) { this.streamId = streamId; } static StreamIdSupplier clientSupplier() { return new StreamIdSupplier(-1); } static StreamIdSupplier serverSupplier() { return new StreamIdSupplier(0); } /** * This methods provides new stream id and ensures there is no intersections with already running * streams. This methods is not thread-safe. * * @param streamIds currently running streams store * @return next stream id */ int nextStreamId(IntObjectMap<?> streamIds) { int streamId; do { this.streamId += 2; streamId = (int) (this.streamId & MASK); } while (streamId == 0 || streamIds.containsKey(streamId)); return streamId; } boolean isBeforeOrCurrent(int streamId) { return this.streamId >= streamId && streamId > 0; } }
apache-2.0
nabilzhang/enunciate
jackson/src/main/java/com/webcohesion/enunciate/modules/jackson/model/types/JsonType.java
1535
/** * Copyright © 2006-2016 Web Cohesion (info@webcohesion.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.webcohesion.enunciate.modules.jackson.model.types; /** * @author Ryan Heaton */ public interface JsonType { /** * Whether this is an object JSON type. * * @return Whether this is an object JSON type. */ boolean isObject(); /** * Whether this is an array JSON type. * * @return Whether this is an array JSON type. */ boolean isArray(); /** * Whether this is a string JSON type. * * @return Whether this is a string JSON type. */ boolean isString(); /** * Whether this is a number JSON type. * * @return Whether this is a number JSON type. */ boolean isNumber(); /** * Whether this is a whole number. * * @return Whether this is a whole number. */ boolean isWholeNumber(); /** * Whether this is a boolean JSON type. * * @return Whether this is a boolean JSON type. */ boolean isBoolean(); }
apache-2.0
weiwenqiang/GitHub
MVP/Android-ZBLibrary-master/ZBLibrary(AndroidStudio)/app/src/main/java/zblibrary/demo/DEMO/DemoBottomWindow.java
4795
/*Copyright ©2015 TommyLemon(https://github.com/TommyLemon)

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.*/

package zblibrary.demo.DEMO;

import zblibrary.demo.R;
import zblibrary.demo.activity_fragment.UserActivity;
import zuo.biao.library.base.BaseViewBottomWindow;
import zuo.biao.library.model.Entry;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.view.View.OnClickListener;

/** Usage: copy > paste > rename > adapt the code. */
/** Example of a bottom pop-up window screen.
 * @author Lemon
 * <br> Launch with toActivity or startActivityForResult (DemoBottomWindow.createIntent(...), requestCode);
 * <br> then inside onActivityResult
 * <br> data.getStringExtra(DemoBottomWindow.RESULT_DATA); yields the returned value.
 */
public class DemoBottomWindow extends BaseViewBottomWindow<Entry<String, String>, DemoView> implements OnClickListener {
	private static final String TAG = "DemoBottomWindow";

	//launch method<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
	/** Builds the Intent that starts this window; the title is passed via INTENT_TITLE. */
	public static Intent createIntent(Context context, String title) {
		return new Intent(context, DemoBottomWindow.class).putExtra(INTENT_TITLE, title);
	}
	//launch method>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>

	@Override
	public Activity getActivity() {
		return this;
	}

	@Override
	protected void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);

		// grouped initialization methods; these calls are mandatory <<<<<<<<<<
		initView();
		initData();
		initEvent();
		// grouped initialization methods; these calls are mandatory >>>>>>>>>>
	}

	//UI section (touches UI only; no data fetching/processing and no event listeners here)<<<<<<<<<<<<<<<<<<<<<
	@Override
	public void initView() {//must be called
		super.initView();

	}

	//UI section (touches UI only; no data fetching/processing and no event listeners here)>>>>>>>>>>>>>>>>>>>>>>

	//Data section (data fetching/processing code; no event listeners here)<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
	@Override
	public void initData() {//must be called
		super.initData();

		// build the demo payload shown by this window and bind it to the view
		data = new Entry<String, String>("Activity", TAG);
		data.setId(1);

		containerView.bindView(data);
	}

	@Override
	public String getTitleName() {
		return "Demo";
	}

	@Override
	public String getReturnName() {
		return null;
	}

	@Override
	public String getForwardName() {
		return null;
	}

	@Override
	protected DemoView createView() {
		return new DemoView(context, getResources());
	}

	@Override
	protected void setResult() {
		//sample code<<<<<<<<<<<<<<<<<<<
		setResult(RESULT_OK, new Intent().putExtra(RESULT_DATA, TAG + " saved"));
		//sample code>>>>>>>>>>>>>>>>>>>
	}

	//Data section (data fetching/processing code; no event listeners here)>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>

	//Event section (anything containing event-listener code)<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
	@Override
	public void initEvent() {//must be called
		super.initEvent();

		containerView.setOnClickListener(this);
	}

	//built-in listeners<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
	@Override
	public void onClick(View v) {
		switch (v.getId()) {
		case R.id.ivDemoViewHead:
			// open the user detail screen for the bound entry
			if (data != null) {
				toActivity(UserActivity.createIntent(context, data.getId()));
			}
			break;
		default:
			break;
		}
	}

	//class-specific listeners<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
	//class-specific listeners>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
	//built-in listener methods>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
	//class-specific listeners>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
	//built-in listeners>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
	//Event section (anything containing event-listener code)>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>

	//inner classes, use sparingly<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
	//inner classes, use sparingly>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
}
apache-2.0
kevinearls/camel
components/camel-cmis/src/main/java/org/apache/camel/component/cmis/DefaultCMISSessionFacadeFactory.java
1580
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.cmis; import java.util.HashMap; import java.util.Map; import org.apache.camel.support.EndpointHelper; public class DefaultCMISSessionFacadeFactory implements CMISSessionFacadeFactory { @Override public CMISSessionFacade create(CMISEndpoint endpoint) throws Exception { CMISSessionFacade facade = new CMISSessionFacade(endpoint.getCmsUrl()); // must use a copy of the properties Map<String, Object> copy = new HashMap<>(endpoint.getProperties()); // which we then set on the newly created facade EndpointHelper.setReferenceProperties(endpoint.getCamelContext(), facade, copy); EndpointHelper.setProperties(endpoint.getCamelContext(), facade, copy); return facade; } }
apache-2.0
shardingjdbc/sharding-jdbc
sharding-integration-test/sharding-jdbc-test/src/test/java/org/apache/shardingsphere/dbtest/env/schema/SchemaEnvironmentManager.java
8050
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.shardingsphere.dbtest.env.schema;

import com.google.common.base.Joiner;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
import org.apache.shardingsphere.dbtest.env.EnvironmentPath;
import org.apache.shardingsphere.dbtest.env.IntegrateTestEnvironment;
import org.apache.shardingsphere.dbtest.env.datasource.DataSourceUtil;
import org.apache.shardingsphere.underlying.common.database.type.DatabaseType;
import org.h2.tools.RunScript;

import javax.sql.DataSource;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import java.io.FileReader;
import java.io.IOException;
import java.io.StringReader;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;

/**
 * Schema environment manager.
 *
 * <p>Creates/drops the test databases and tables described by a rule type's
 * XML environment file, for every database type enabled in
 * {@link IntegrateTestEnvironment}. Utility class (private constructor via
 * Lombok); all operations are static.</p>
 */
@NoArgsConstructor(access = AccessLevel.PRIVATE)
public final class SchemaEnvironmentManager {
    
    /**
     * Get data source names.
     *
     * @param ruleType rule type
     * @return data source names
     * @throws IOException IO exception
     * @throws JAXBException JAXB exception
     */
    public static Collection<String> getDataSourceNames(final String ruleType) throws IOException, JAXBException {
        return unmarshal(EnvironmentPath.getDatabaseEnvironmentResourceFile(ruleType)).getDatabases();
    }
    
    /**
     * Create database.
     *
     * <p>Joins all CREATE statements with ";\n" and runs them as one script
     * per database type.</p>
     *
     * @param ruleType rule type
     * @throws IOException IO exception
     * @throws JAXBException JAXB exception
     * @throws SQLException SQL exception
     */
    public static void createDatabase(final String ruleType) throws IOException, JAXBException, SQLException {
        SchemaEnvironment databaseInitialization = unmarshal(EnvironmentPath.getDatabaseEnvironmentResourceFile(ruleType));
        for (DatabaseType each : IntegrateTestEnvironment.getInstance().getDatabaseTypes()) {
            // null database name: connect to the server itself, not a particular database
            DataSource dataSource = DataSourceUtil.createDataSource(each, null);
            try (
                    Connection connection = dataSource.getConnection();
                    StringReader stringReader = new StringReader(Joiner.on(";\n").skipNulls().join(generateCreateDatabaseSQLs(each, databaseInitialization.getDatabases())))) {
                RunScript.execute(connection, stringReader);
            }
        }
    }
    
    /**
     * Drop database.
     *
     * <p>Unlike {@link #createDatabase(String)}, SQL errors are swallowed
     * because the database may simply not exist yet.</p>
     *
     * @param ruleType rule type
     * @throws IOException IO exception
     * @throws JAXBException JAXB exception
     */
    public static void dropDatabase(final String ruleType) throws IOException, JAXBException {
        SchemaEnvironment databaseInitialization = unmarshal(EnvironmentPath.getDatabaseEnvironmentResourceFile(ruleType));
        for (DatabaseType each : IntegrateTestEnvironment.getInstance().getDatabaseTypes()) {
            DataSource dataSource = DataSourceUtil.createDataSource(each, null);
            try (
                    Connection connection = dataSource.getConnection();
                    StringReader stringReader = new StringReader(Joiner.on(";\n").skipNulls().join(generateDropDatabaseSQLs(each, databaseInitialization.getDatabases())))) {
                RunScript.execute(connection, stringReader);
            } catch (final SQLException ex) {
                // TODO database maybe not exist
            }
        }
    }
    
    // Reads one SchemaEnvironment definition from the given XML file.
    private static SchemaEnvironment unmarshal(final String databaseInitializationFilePath) throws IOException, JAXBException {
        try (FileReader reader = new FileReader(databaseInitializationFilePath)) {
            return (SchemaEnvironment) JAXBContext.newInstance(SchemaEnvironment.class).createUnmarshaller().unmarshal(reader);
        }
    }
    
    // H2 needs no explicit CREATE (databases are file/memory scoped);
    // Oracle models a "database" as a schema, the rest use CREATE DATABASE.
    private static Collection<String> generateCreateDatabaseSQLs(final DatabaseType databaseType, final List<String> databases) {
        if ("H2".equals(databaseType.getName())) {
            return Collections.emptyList();
        }
        String sql = "Oracle".equals(databaseType.getName()) ? "CREATE SCHEMA %s" : "CREATE DATABASE %s";
        Collection<String> result = new LinkedList<>();
        for (String each : databases) {
            result.add(String.format(sql, each));
        }
        return result;
    }
    
    // Mirror of generateCreateDatabaseSQLs for dropping.
    private static Collection<String> generateDropDatabaseSQLs(final DatabaseType databaseType, final List<String> databases) {
        if ("H2".equals(databaseType.getName())) {
            return Collections.emptyList();
        }
        String sql = "Oracle".equals(databaseType.getName()) ? "DROP SCHEMA %s" : "DROP DATABASE IF EXISTS %s";
        Collection<String> result = new LinkedList<>();
        for (String each : databases) {
            result.add(String.format(sql, each));
        }
        return result;
    }
    
    /**
     * Create table.
     *
     * @param ruleType rule type
     * @throws JAXBException JAXB exception
     * @throws IOException IO exception
     * @throws SQLException SQL exception
     */
    public static void createTable(final String ruleType) throws JAXBException, IOException, SQLException {
        for (DatabaseType each : IntegrateTestEnvironment.getInstance().getDatabaseTypes()) {
            SchemaEnvironment databaseEnvironmentSchema = unmarshal(EnvironmentPath.getDatabaseEnvironmentResourceFile(ruleType));
            createTable(databaseEnvironmentSchema, each);
        }
    }
    
    // Runs the table-creation script against every database of the schema.
    private static void createTable(final SchemaEnvironment databaseEnvironmentSchema, final DatabaseType databaseType) throws SQLException {
        for (String each : databaseEnvironmentSchema.getDatabases()) {
            DataSource dataSource = DataSourceUtil.createDataSource(databaseType, each);
            try (Connection connection = dataSource.getConnection();
                 StringReader stringReader = new StringReader(Joiner.on(";\n").join(databaseEnvironmentSchema.getTableCreateSQLs()))) {
                RunScript.execute(connection, stringReader);
            }
        }
    }
    
    /**
     * Drop table.
     *
     * @param ruleType rule type
     * @throws JAXBException JAXB exception
     * @throws IOException IO exception
     */
    public static void dropTable(final String ruleType) throws JAXBException, IOException {
        for (DatabaseType each : IntegrateTestEnvironment.getInstance().getDatabaseTypes()) {
            SchemaEnvironment databaseEnvironmentSchema = unmarshal(EnvironmentPath.getDatabaseEnvironmentResourceFile(ruleType));
            dropTable(databaseEnvironmentSchema, each);
        }
    }
    
    // Best-effort: SQL failures are ignored because the table may not exist.
    private static void dropTable(final SchemaEnvironment databaseEnvironmentSchema, final DatabaseType databaseType) {
        for (String each : databaseEnvironmentSchema.getDatabases()) {
            DataSource dataSource = DataSourceUtil.createDataSource(databaseType, each);
            try (Connection connection = dataSource.getConnection();
                 StringReader stringReader = new StringReader(Joiner.on(";\n").join(databaseEnvironmentSchema.getTableDropSQLs()))) {
                RunScript.execute(connection, stringReader);
            } catch (final SQLException ex) {
                // TODO table maybe not exist
            }
        }
    }
}
apache-2.0
clonetwin26/buck
test/com/facebook/buck/rules/coercer/CoercedTypeCacheTest.java
5705
/*
 * Copyright 2017-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.rules.coercer;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import com.facebook.buck.rules.Hint;
import com.facebook.buck.util.immutables.BuckStyleImmutable;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Ordering;
import java.util.Optional;
import org.immutables.value.Value;
import org.junit.Test;

/**
 * Tests for {@code CoercedTypeCache.extractForImmutableBuilder}: verifies how
 * optionality, python-style naming, and {@link Hint} flags (isDep / isInput)
 * are extracted from an Immutables builder, including inheritance and
 * overriding behavior via the {@code Dto} fixture declared below.
 */
public class CoercedTypeCacheTest {
  @Test
  public void requiredIsNotOptional() {
    assertFalse(getParamInfo("required").isOptional());
  }

  @Test
  public void optionalIsOptional() {
    assertTrue(getParamInfo("optional").isOptional());
  }

  @Test
  public void optionalIsInheritedOptional() {
    assertTrue(getParamInfo("interfaceOptional").isOptional());
  }

  @Test
  public void defaultValuesAreOptional() {
    assertTrue(getParamInfo("default").isOptional());
  }

  @Test
  public void defaultValuesAreOptionalThroughInheritence() {
    assertTrue(getParamInfo("interfaceDefault").isOptional());
  }

  @Test
  public void getName() {
    // Extraction should surface every abstract/default property of the fixture.
    assertEquals(
        ImmutableSortedSet.of(
            "consistentOverriddenInterfaceNonDep",
            "consistentOverriddenInterfaceNonInput",
            "default",
            "interfaceDefault",
            "interfaceNonDep",
            "interfaceNonInput",
            "interfaceOptional",
            "optional",
            "overriddenInterfaceNonDep",
            "overriddenInterfaceNonInput",
            "nonDep",
            "nonInput",
            "required"),
        ImmutableSortedSet.copyOf(
            CoercedTypeCache.extractForImmutableBuilder(
                    Dto.Builder.class, new DefaultTypeCoercerFactory())
                .keySet()));
  }

  @Test
  public void getPythonName() {
    // camelCase property names must be exposed as snake_case python names.
    assertEquals(
        ImmutableSortedSet.of(
            "consistent_overridden_interface_non_dep",
            "consistent_overridden_interface_non_input",
            "default",
            "interface_default",
            "interface_non_dep",
            "interface_non_input",
            "interface_optional",
            "optional",
            "overridden_interface_non_dep",
            "overridden_interface_non_input",
            "non_dep",
            "non_input",
            "required"),
        CoercedTypeCache.extractForImmutableBuilder(
                Dto.Builder.class, new DefaultTypeCoercerFactory())
            .values()
            .stream()
            .map(ParamInfo::getPythonName)
            .collect(ImmutableSortedSet.toImmutableSortedSet(Ordering.natural())));
  }

  @Test
  public void isDep() {
    assertFalse(getParamInfo("nonDep").isDep());
    // no hint: defaults to being a dep
    assertTrue(getParamInfo("optional").isDep());
  }

  @Test
  public void isDepInherited() {
    assertFalse(getParamInfo("interfaceNonDep").isDep());
    assertFalse(getParamInfo("consistentOverriddenInterfaceNonDep").isDep());
    // a hint-less override clears the interface's @Hint(isDep = false)
    assertTrue(getParamInfo("overriddenInterfaceNonDep").isDep());
    assertTrue(getParamInfo("interfaceOptional").isDep());
  }

  @Test
  public void isInput() {
    assertFalse(getParamInfo("nonInput").isInput());
    assertTrue(getParamInfo("optional").isInput());
  }

  @Test
  public void isInputInherited() {
    assertFalse(getParamInfo("interfaceNonInput").isInput());
    assertFalse(getParamInfo("consistentOverriddenInterfaceNonInput").isInput());
    // a hint-less override clears the interface's @Hint(isInput = false)
    assertTrue(getParamInfo("overriddenInterfaceNonInput").isInput());
    assertTrue(getParamInfo("interfaceOptional").isInput());
  }

  // Fixture: interface side of the DTO; exercises hints declared on an interface.
  interface DtoInterface {
    Optional<String> getInterfaceOptional();

    @Value.Default
    default String getInterfaceDefault() {
      return "blue";
    }

    @Hint(isDep = false)
    String getInterfaceNonDep();

    @Hint(isDep = false)
    String getOverriddenInterfaceNonDep();

    @Hint(isDep = false)
    String getConsistentOverriddenInterfaceNonDep();

    @Hint(isInput = false)
    String getInterfaceNonInput();

    @Hint(isInput = false)
    String getOverriddenInterfaceNonInput();

    @Hint(isInput = false)
    String getConsistentOverriddenInterfaceNonInput();
  }

  // Fixture: abstract class side; overrides some interface members with and
  // without repeating the @Hint annotation.
  @BuckStyleImmutable
  @Value.Immutable
  abstract static class AbstractDto implements DtoInterface {
    abstract Optional<String> getOptional();

    abstract String getRequired();

    @Value.Default
    String getDefault() {
      return "purple";
    }

    @Hint(isDep = false)
    abstract String getNonDep();

    @Override
    public abstract String getOverriddenInterfaceNonDep();

    @Override
    @Hint(isDep = false)
    public abstract String getConsistentOverriddenInterfaceNonDep();

    @Hint(isInput = false)
    abstract String getNonInput();

    @Override
    public abstract String getOverriddenInterfaceNonInput();

    @Override
    @Hint(isInput = false)
    public abstract String getConsistentOverriddenInterfaceNonInput();
  }

  // Convenience accessor for one extracted ParamInfo by property name.
  private static ParamInfo getParamInfo(String name) {
    return CoercedTypeCache.extractForImmutableBuilder(
            Dto.Builder.class, new DefaultTypeCoercerFactory())
        .get(name);
  }
}
apache-2.0
liveontologies/elk-reasoner
elk-reasoner/src/main/java/org/semanticweb/elk/reasoner/tracing/TracingInference.java
1509
package org.semanticweb.elk.reasoner.tracing; import org.liveontologies.puli.Inference; import org.semanticweb.elk.reasoner.indexing.model.IndexedAxiomInference; import org.semanticweb.elk.reasoner.saturation.inferences.SaturationInference; /* * #%L * ELK Reasoner * $Id:$ * $HeadURL:$ * %% * Copyright (C) 2011 - 2015 Department of Computer Science, University of Oxford * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * An operation producing {@link Conclusion}s from other {@link Conclusion}s * called premises using some inference rule. * * @author Yevgeny Kazakov * */ public interface TracingInference extends Inference<Conclusion> { public <O> O accept(Visitor<O> visitor); /** * The visitor pattern for instances * * @author Yevgeny Kazakov * * @param <O> * the type of the output */ interface Visitor<O> extends IndexedAxiomInference.Visitor<O>, SaturationInference.Visitor<O> { // combined interface } }
apache-2.0
frankfenghua/asyncrmi
src/main/java/org/async/rmi/config/Configuration.java
3008
package org.async.rmi.config;

import org.async.rmi.TimeSpan;
import org.async.rmi.Util;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.util.concurrent.TimeUnit;

/**
 * Runtime configuration for the RMI layer: ports, client timeouts, server
 * host name and the network map. Optionally loaded from a YAML file named by
 * the {@code java.rmi.server.config} system property.
 *
 * Created by Barak Bar Orion
 * 05/10/14.
 */
public class Configuration {
    @SuppressWarnings("UnusedDeclaration")
    private static final Logger logger = LoggerFactory.getLogger(Configuration.class);

    private int configurePort = 0;
    private int actualPort;
    private TimeSpan clientConnectTimeout = new TimeSpan(30, TimeUnit.SECONDS);
    private TimeSpan clientTimeout = new TimeSpan(30, TimeUnit.SECONDS);
    private String serverHostName;
    private NetMap netMap;

    /**
     * Loads the configuration from the YAML file named by the
     * {@code java.rmi.server.config} system property (default
     * {@code config.yml}); falls back to defaults when the file is missing
     * or unreadable.
     */
    public static Configuration readDefault() {
        File ymlFile = new File(System.getProperty("java.rmi.server.config", "config.yml"));
        if (!ymlFile.exists()) {
            return new Configuration();
        }
        logger.debug("reading configuration from {}", ymlFile.getAbsolutePath());
        try {
            return Util.readConfiguration(ymlFile);
        } catch (Exception e) {
            // A broken file must not prevent startup: log and use defaults.
            logger.error(e.toString(), e);
            return new Configuration();
        }
    }

    public Configuration() {
        this.netMap = NetMap.empty();
    }

    public int getConfigurePort() {
        return configurePort;
    }

    /** Falls back to the {@code java.rmi.server.hostname} system property when unset. */
    public String getServerHostName() {
        return serverHostName != null
                ? serverHostName
                : System.getProperty("java.rmi.server.hostname", null);
    }

    @SuppressWarnings("UnusedDeclaration")
    public void setServerHostName(String serverHostName) {
        this.serverHostName = serverHostName;
    }

    public void setConfigurePort(int configurePort) {
        this.configurePort = configurePort;
    }

    public int getActualPort() {
        return actualPort;
    }

    public void setActualPort(int actualPort) {
        this.actualPort = actualPort;
    }

    public TimeSpan getClientConnectTimeout() {
        return clientConnectTimeout;
    }

    /** Fluent setter variant; returns {@code this} for chaining. */
    public Configuration setClientConnectTimeout(long time, TimeUnit timeUnit) {
        this.clientConnectTimeout = new TimeSpan(time, timeUnit);
        return this;
    }

    public TimeSpan getClientTimeout() {
        return clientTimeout;
    }

    public void setClientTimeout(long time, TimeUnit timeUnit) {
        this.clientTimeout = new TimeSpan(time, timeUnit);
    }

    public NetMap getNetMap() {
        return netMap;
    }

    public void setNetMap(NetMap netMap) {
        this.netMap = netMap;
    }

    @SuppressWarnings("UnusedDeclaration")
    public void setClientConnectTimeout(TimeSpan clientConnectTimeout) {
        this.clientConnectTimeout = clientConnectTimeout;
    }

    @SuppressWarnings("UnusedDeclaration")
    public void setClientTimeout(TimeSpan clientTimeout) {
        this.clientTimeout = clientTimeout;
    }
}
apache-2.0
filiphr/camunda-bpmn-model
src/main/java/org/camunda/bpm/model/bpmn/impl/instance/CorrelationSubscriptionImpl.java
3282
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.camunda.bpm.model.bpmn.impl.instance;

import org.camunda.bpm.model.bpmn.instance.BaseElement;
import org.camunda.bpm.model.bpmn.instance.CorrelationKey;
import org.camunda.bpm.model.bpmn.instance.CorrelationPropertyBinding;
import org.camunda.bpm.model.bpmn.instance.CorrelationSubscription;
import org.camunda.bpm.model.xml.ModelBuilder;
import org.camunda.bpm.model.xml.impl.instance.ModelTypeInstanceContext;
import org.camunda.bpm.model.xml.type.ModelElementTypeBuilder;
import org.camunda.bpm.model.xml.type.child.ChildElementCollection;
import org.camunda.bpm.model.xml.type.child.SequenceBuilder;
import org.camunda.bpm.model.xml.type.reference.AttributeReference;

import java.util.Collection;

import static org.camunda.bpm.model.bpmn.impl.BpmnModelConstants.*;
import static org.camunda.bpm.model.xml.type.ModelElementTypeBuilder.ModelTypeInstanceProvider;

/**
 * The BPMN correlationSubscription element
 *
 * <p>Registers the element's type metadata (required {@code correlationKeyRef}
 * attribute and a sequence of correlationPropertyBinding children) and exposes
 * typed accessors over that metadata.</p>
 *
 * @author Sebastian Menski
 */
public class CorrelationSubscriptionImpl extends BaseElementImpl implements CorrelationSubscription {

  // Populated once during registerType; shared by all instances of this type.
  protected static AttributeReference<CorrelationKey> correlationKeyAttribute;
  protected static ChildElementCollection<CorrelationPropertyBinding> correlationPropertyBindingCollection;

  /**
   * Registers the correlationSubscription element type with the model builder.
   * Must be invoked before instances can be created from a model.
   */
  public static void registerType(ModelBuilder modelBuilder) {
    ModelElementTypeBuilder typeBuilder = modelBuilder.defineType(CorrelationSubscription.class, BPMN_ELEMENT_CORRELATION_SUBSCRIPTION)
      .namespaceUri(BPMN20_NS)
      .extendsType(BaseElement.class)
      .instanceProvider(new ModelTypeInstanceProvider<CorrelationSubscription>() {
        public CorrelationSubscription newInstance(ModelTypeInstanceContext instanceContext) {
          return new CorrelationSubscriptionImpl(instanceContext);
        }
      });

    // correlationKeyRef is mandatory and resolved as a QName reference.
    correlationKeyAttribute = typeBuilder.stringAttribute(BPMN_ATTRIBUTE_CORRELATION_KEY_REF)
      .required()
      .qNameAttributeReference(CorrelationKey.class)
      .build();

    SequenceBuilder sequenceBuilder = typeBuilder.sequence();

    correlationPropertyBindingCollection = sequenceBuilder.elementCollection(CorrelationPropertyBinding.class)
      .build();

    typeBuilder.build();
  }

  public CorrelationSubscriptionImpl(ModelTypeInstanceContext instanceContext) {
    super(instanceContext);
  }

  /** Resolves the referenced correlation key element for this instance. */
  public CorrelationKey getCorrelationKey() {
    return correlationKeyAttribute.getReferenceTargetElement(this);
  }

  public void setCorrelationKey(CorrelationKey correlationKey) {
    correlationKeyAttribute.setReferenceTargetElement(this, correlationKey);
  }

  /** Live view of the correlationPropertyBinding child elements. */
  public Collection<CorrelationPropertyBinding> getCorrelationPropertyBindings() {
    return correlationPropertyBindingCollection.get(this);
  }
}
apache-2.0
hvmunlimited/QuickControlPanel
QuickControlPanel/src/main/java/com/woodblockwithoutco/quickcontroldock/model/buttons/LocalBroadcastTriToggleButton.java
2689
/******************************************************************************* * Copyright 2014 Alexander Leontyev * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *******************************************************************************/ package com.woodblockwithoutco.quickcontroldock.model.buttons; import com.woodblockwithoutco.quickcontroldock.global.event.VisibilityEventNotifier; import com.woodblockwithoutco.quickcontroldock.global.event.VisibilityEventNotifier.OnVisibilityEventListener; import com.woodblockwithoutco.quickcontroldock.util.ReceiverUtil; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.support.v4.content.LocalBroadcastManager; import android.util.AttributeSet; public abstract class LocalBroadcastTriToggleButton extends BaseTriToggleButton implements OnVisibilityEventListener { protected IntentFilter mIntentFilter = new IntentFilter(); protected boolean mBroadcastRegistered = false; private BroadcastReceiver mReceiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { LocalBroadcastTriToggleButton.this.onReceive(intent); } }; public LocalBroadcastTriToggleButton(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); VisibilityEventNotifier.getInstance().registerListener(this); } public void startListeningForBroadcast(IntentFilter filter) { if(!mBroadcastRegistered) { 
mBroadcastRegistered = true; LocalBroadcastManager.getInstance(getContext()).registerReceiver(mReceiver, filter); } } public void stopListeningForBroadcast() { if(mBroadcastRegistered) { mBroadcastRegistered = false; ReceiverUtil.unregisterLocalReceiverSafe(getContext(), mReceiver); } } public abstract void onReceive(Intent intent); @Override public void onShow() { startListeningForBroadcast(mIntentFilter); setVisualState(mAction.getCurrentState()); } @Override public void onHide() { stopListeningForBroadcast(); } protected void addBroadcastAction(String action) { mIntentFilter.addAction(action); } }
apache-2.0
apurtell/hbase
hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.java
9505
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with this * work for additional information regarding copyright ownership. The ASF * licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package org.apache.hadoop.hbase.io.encoding; import static org.apache.hadoop.hbase.io.compress.Compression.Algorithm.NONE; import java.io.ByteArrayInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.io.InputStream; import java.security.SecureRandom; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.io.ByteArrayOutputStream; import org.apache.hadoop.hbase.io.TagCompressionContext; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.crypto.Cipher; import org.apache.hadoop.hbase.io.crypto.Encryption; import org.apache.hadoop.hbase.io.crypto.Encryptor; import org.apache.hadoop.hbase.io.hfile.BlockType; import org.apache.hadoop.hbase.io.hfile.HFileContext; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.compress.CompressionOutputStream; import org.apache.hadoop.io.compress.Compressor; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hbase.thirdparty.com.google.common.base.Preconditions; /** * A default implementation of {@link HFileBlockEncodingContext}. It will * compress the data section as one continuous buffer. 
* * @see HFileBlockDefaultDecodingContext for the decompression part * */ @InterfaceAudience.Private public class HFileBlockDefaultEncodingContext implements HFileBlockEncodingContext { private BlockType blockType; private final DataBlockEncoding encodingAlgo; private byte[] dummyHeader; // Compression state /** Compressor, which is also reused between consecutive blocks. */ private Compressor compressor; /** Compression output stream */ private CompressionOutputStream compressionStream; /** Underlying stream to write compressed bytes to */ private ByteArrayOutputStream compressedByteStream; private HFileContext fileContext; private TagCompressionContext tagCompressionContext; // Encryption state /** Underlying stream to write encrypted bytes to */ private ByteArrayOutputStream cryptoByteStream; /** Initialization vector */ private byte[] iv; private EncodingState encoderState; /** * @param conf configuraton * @param encoding encoding used * @param headerBytes dummy header bytes * @param fileContext HFile meta data */ public HFileBlockDefaultEncodingContext(Configuration conf, DataBlockEncoding encoding, byte[] headerBytes, HFileContext fileContext) { this.encodingAlgo = encoding; this.fileContext = fileContext; Compression.Algorithm compressionAlgorithm = fileContext.getCompression() == null ? NONE : fileContext.getCompression(); if (compressionAlgorithm != NONE) { if (compressor == null) { compressor = compressionAlgorithm.getCompressor(); // Some algorithms don't return compressors and accept null as a valid parameter for // same when creating compression streams. We can ignore these cases wrt reinit. 
if (compressor != null) { compressor.reinit(conf); } } compressedByteStream = new ByteArrayOutputStream(); try { compressionStream = compressionAlgorithm.createPlainCompressionStream( compressedByteStream, compressor); } catch (IOException e) { throw new RuntimeException( "Could not create compression stream for algorithm " + compressionAlgorithm, e); } } Encryption.Context cryptoContext = fileContext.getEncryptionContext(); if (cryptoContext != Encryption.Context.NONE) { cryptoByteStream = new ByteArrayOutputStream(); iv = new byte[cryptoContext.getCipher().getIvLength()]; new SecureRandom().nextBytes(iv); } dummyHeader = Preconditions.checkNotNull(headerBytes, "Please pass HConstants.HFILEBLOCK_DUMMY_HEADER instead of null for param headerBytes"); } /** * prepare to start a new encoding. */ public void prepareEncoding(DataOutputStream out) throws IOException { if (encodingAlgo != null && encodingAlgo != DataBlockEncoding.NONE) { encodingAlgo.writeIdInBytes(out); } } @Override public void postEncoding(BlockType blockType) throws IOException { this.blockType = blockType; } @Override public Bytes compressAndEncrypt(byte[] data, int offset, int length) throws IOException { return compressAfterEncoding(data, offset, length, dummyHeader); } private Bytes compressAfterEncoding(byte[] uncompressedBytesWithHeaderBuffer, int uncompressedBytesWithHeaderOffset, int uncompressedBytesWithHeaderLength, byte[] headerBytes) throws IOException { Encryption.Context cryptoContext = fileContext.getEncryptionContext(); if (cryptoContext != Encryption.Context.NONE) { // Encrypted block format: // +--------------------------+ // | byte iv length | // +--------------------------+ // | iv data ... | // +--------------------------+ // | encrypted block data ... 
| // +--------------------------+ cryptoByteStream.reset(); // Write the block header (plaintext) cryptoByteStream.write(headerBytes); InputStream in; int plaintextLength; // Run any compression before encryption if (fileContext.getCompression() != Compression.Algorithm.NONE) { compressedByteStream.reset(); compressionStream.resetState(); compressionStream.write(uncompressedBytesWithHeaderBuffer, headerBytes.length + uncompressedBytesWithHeaderOffset, uncompressedBytesWithHeaderLength - headerBytes.length); compressionStream.flush(); compressionStream.finish(); byte[] plaintext = compressedByteStream.toByteArray(); plaintextLength = plaintext.length; in = new ByteArrayInputStream(plaintext); } else { plaintextLength = uncompressedBytesWithHeaderLength - headerBytes.length; in = new ByteArrayInputStream(uncompressedBytesWithHeaderBuffer, headerBytes.length + uncompressedBytesWithHeaderOffset, plaintextLength); } if (plaintextLength > 0) { // Set up the cipher Cipher cipher = cryptoContext.getCipher(); Encryptor encryptor = cipher.getEncryptor(); encryptor.setKey(cryptoContext.getKey()); // Set up the IV int ivLength = iv.length; Preconditions.checkState(ivLength <= Byte.MAX_VALUE, "IV length out of range"); cryptoByteStream.write(ivLength); if (ivLength > 0) { encryptor.setIv(iv); cryptoByteStream.write(iv); } // Encrypt the data Encryption.encrypt(cryptoByteStream, in, encryptor); // Increment the IV given the final block size Encryption.incrementIv(iv, 1 + (cryptoByteStream.size() / encryptor.getBlockSize())); return new Bytes(cryptoByteStream.getBuffer(), 0, cryptoByteStream.size()); } else { cryptoByteStream.write(0); return new Bytes(cryptoByteStream.getBuffer(), 0, cryptoByteStream.size()); } } else { if (this.fileContext.getCompression() != NONE) { compressedByteStream.reset(); compressedByteStream.write(headerBytes); compressionStream.resetState(); compressionStream.write(uncompressedBytesWithHeaderBuffer, headerBytes.length + 
uncompressedBytesWithHeaderOffset, uncompressedBytesWithHeaderLength - headerBytes.length); compressionStream.flush(); compressionStream.finish(); return new Bytes(compressedByteStream.getBuffer(), 0, compressedByteStream.size()); } else { return null; } } } @Override public BlockType getBlockType() { return blockType; } /** * Releases the compressor this writer uses to compress blocks into the * compressor pool. */ @Override public void close() { if (compressor != null) { this.fileContext.getCompression().returnCompressor(compressor); compressor = null; } } @Override public DataBlockEncoding getDataBlockEncoding() { return this.encodingAlgo; } @Override public HFileContext getHFileContext() { return this.fileContext; } public TagCompressionContext getTagCompressionContext() { return tagCompressionContext; } public void setTagCompressionContext(TagCompressionContext tagCompressionContext) { this.tagCompressionContext = tagCompressionContext; } @Override public EncodingState getEncodingState() { return this.encoderState; } @Override public void setEncodingState(EncodingState state) { this.encoderState = state; } }
apache-2.0
cping/LGame
Java/Loon-Neo-GWT/src/loon/html5/gwt/emu2.8/com/google/gwt/corp/compatibility/ConsolePrintStream.java
1570
/* * Copyright 2010 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.google.gwt.corp.compatibility; import java.io.OutputStream; import java.io.PrintStream; public class ConsolePrintStream extends PrintStream { StringBuilder buf = new StringBuilder(); public ConsolePrintStream () { super((OutputStream)null); } public void print (String s) { while (true) { int cut = s.indexOf('\n'); if (cut == -1) { break; } println(s.substring(0, cut)); s = s.substring(cut + 1); } buf.append(s); } public native void consoleLog (String msg) /*-{ if (window.console) { window.console.log(msg); } else { document.title = "LOG:" + msg; } }-*/; public void print (char c) { if (c == '\n') { println(""); } else { buf.append(c); } } public void println () { println(""); } @Override public void println (String s) { buf.append(s); consoleLog(buf.toString()); buf.setLength(0); } }
apache-2.0
tweise/beam
runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/options/DataflowPipelineWorkerPoolOptions.java
10407
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.runners.dataflow.options; import org.apache.beam.runners.dataflow.DataflowRunner; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.options.Default; import org.apache.beam.sdk.options.DefaultValueFactory; import org.apache.beam.sdk.options.Description; import org.apache.beam.sdk.options.Hidden; import org.apache.beam.sdk.options.PipelineOptions; import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.List; /** * Options that are used to configure the Dataflow pipeline worker pool. */ @Description("Options that are used to configure the Dataflow pipeline worker pool.") public interface DataflowPipelineWorkerPoolOptions extends PipelineOptions { /** * Number of workers to use when executing the Dataflow job. Note that selection of an autoscaling * algorithm other then {@code NONE} will affect the size of the worker pool. If left unspecified, * the Dataflow service will determine the number of workers. */ @Description("Number of workers to use when executing the Dataflow job. Note that " + "selection of an autoscaling algorithm other then \"NONE\" will affect the " + "size of the worker pool. 
If left unspecified, the Dataflow service will " + "determine the number of workers.") int getNumWorkers(); void setNumWorkers(int value); /** * Type of autoscaling algorithm to use. */ @Experimental(Experimental.Kind.AUTOSCALING) public enum AutoscalingAlgorithmType { /** Use numWorkers machines. Do not autoscale the worker pool. */ NONE("AUTOSCALING_ALGORITHM_NONE"), @Deprecated BASIC("AUTOSCALING_ALGORITHM_BASIC"), /** Autoscale the workerpool based on throughput (up to maxNumWorkers). */ THROUGHPUT_BASED("AUTOSCALING_ALGORITHM_BASIC"); private final String algorithm; private AutoscalingAlgorithmType(String algorithm) { this.algorithm = algorithm; } /** Returns the string representation of this type. */ public String getAlgorithm() { return this.algorithm; } } /** * [Experimental] The autoscaling algorithm to use for the workerpool. * * <ul> * <li>NONE: does not change the size of the worker pool.</li> * <li>BASIC: autoscale the worker pool size up to maxNumWorkers until the job completes.</li> * <li>THROUGHPUT_BASED: autoscale the workerpool based on throughput (up to maxNumWorkers). * </li> * </ul> */ @Description("[Experimental] The autoscaling algorithm to use for the workerpool. " + "NONE: does not change the size of the worker pool. " + "BASIC (deprecated): autoscale the worker pool size up to maxNumWorkers until the job " + "completes. " + "THROUGHPUT_BASED: autoscale the workerpool based on throughput (up to maxNumWorkers).") @Experimental(Experimental.Kind.AUTOSCALING) AutoscalingAlgorithmType getAutoscalingAlgorithm(); void setAutoscalingAlgorithm(AutoscalingAlgorithmType value); /** * The maximum number of workers to use for the workerpool. This options limits the size of the * workerpool for the lifetime of the job, including * <a href="https://cloud.google.com/dataflow/pipelines/updating-a-pipeline">pipeline updates</a>. * If left unspecified, the Dataflow service will compute a ceiling. 
*/ @Description("The maximum number of workers to use for the workerpool. This options limits the " + "size of the workerpool for the lifetime of the job, including pipeline updates. " + "If left unspecified, the Dataflow service will compute a ceiling.") int getMaxNumWorkers(); void setMaxNumWorkers(int value); /** * Remote worker disk size, in gigabytes, or 0 to use the default size. */ @Description("Remote worker disk size, in gigabytes, or 0 to use the default size.") int getDiskSizeGb(); void setDiskSizeGb(int value); /** * Docker container image that executes Dataflow worker harness, residing in Google Container * Registry. */ @Default.InstanceFactory(WorkerHarnessContainerImageFactory.class) @Description("Docker container image that executes Dataflow worker harness, residing in Google " + " Container Registry.") @Hidden String getWorkerHarnessContainerImage(); void setWorkerHarnessContainerImage(String value); /** * Returns the default Docker container image that executes Dataflow worker harness, residing in * Google Container Registry. */ public static class WorkerHarnessContainerImageFactory implements DefaultValueFactory<String> { @Override public String create(PipelineOptions options) { DataflowPipelineOptions dataflowOptions = options.as(DataflowPipelineOptions.class); if (dataflowOptions.isStreaming()) { return DataflowRunner.STREAMING_WORKER_HARNESS_CONTAINER_IMAGE; } else { return DataflowRunner.BATCH_WORKER_HARNESS_CONTAINER_IMAGE; } } } /** * GCE <a href="https://cloud.google.com/compute/docs/networking">network</a> for launching * workers. * * <p>Default is up to the Dataflow service. */ @Description("GCE network for launching workers. For more information, see the reference " + "documentation https://cloud.google.com/compute/docs/networking. " + "Default is up to the Dataflow service.") String getNetwork(); void setNetwork(String value); /** * GCE <a href="https://cloud.google.com/compute/docs/networking">subnetwork</a> for launching * workers. 
* * <p>Default is up to the Dataflow service. Expected format is * regions/REGION/subnetworks/SUBNETWORK. * * <p>You may also need to specify network option. */ @Description("GCE subnetwork for launching workers. For more information, see the reference " + "documentation https://cloud.google.com/compute/docs/networking. " + "Default is up to the Dataflow service.") String getSubnetwork(); void setSubnetwork(String value); /** * GCE <a href="https://developers.google.com/compute/docs/zones" * >availability zone</a> for launching workers. * * <p>Default is up to the Dataflow service. */ @Description("GCE availability zone for launching workers. See " + "https://developers.google.com/compute/docs/zones for a list of valid options. " + "Default is up to the Dataflow service.") String getZone(); void setZone(String value); /** * Machine type to create Dataflow worker VMs as. * * <p>See <a href="https://cloud.google.com/compute/docs/machine-types">GCE machine types</a> * for a list of valid options. * * <p>If unset, the Dataflow service will choose a reasonable default. */ @Description("Machine type to create Dataflow worker VMs as. See " + "https://cloud.google.com/compute/docs/machine-types for a list of valid options. " + "If unset, the Dataflow service will choose a reasonable default.") String getWorkerMachineType(); void setWorkerMachineType(String value); /** * The policy for tearing down the workers spun up by the service. */ public enum TeardownPolicy { /** * All VMs created for a Dataflow job are deleted when the job finishes, regardless of whether * it fails or succeeds. */ TEARDOWN_ALWAYS("TEARDOWN_ALWAYS"), /** * All VMs created for a Dataflow job are left running when the job finishes, regardless of * whether it fails or succeeds. */ TEARDOWN_NEVER("TEARDOWN_NEVER"), /** * All VMs created for a Dataflow job are deleted when the job succeeds, but are left running * when it fails. 
(This is typically used for debugging failing jobs by SSHing into the * workers.) */ TEARDOWN_ON_SUCCESS("TEARDOWN_ON_SUCCESS"); private final String teardownPolicy; private TeardownPolicy(String teardownPolicy) { this.teardownPolicy = teardownPolicy; } public String getTeardownPolicyName() { return this.teardownPolicy; } } /** * The teardown policy for the VMs. * * <p>If unset, the Dataflow service will choose a reasonable default. */ @Description("The teardown policy for the VMs. If unset, the Dataflow service will " + "choose a reasonable default.") TeardownPolicy getTeardownPolicy(); void setTeardownPolicy(TeardownPolicy value); /** * List of local files to make available to workers. * * <p>Files are placed on the worker's classpath. * * <p>The default value is the list of jars from the main program's classpath. */ @Description("Files to stage on GCS and make available to workers. " + "Files are placed on the worker's classpath. " + "The default value is all files from the classpath.") @JsonIgnore List<String> getFilesToStage(); void setFilesToStage(List<String> value); /** * Specifies what type of persistent disk should be used. The value should be a full or partial * URL of a disk type resource, e.g., zones/us-central1-f/disks/pd-standard. For * more information, see the * <a href="https://cloud.google.com/compute/docs/reference/latest/diskTypes">API reference * documentation for DiskTypes</a>. */ @Description("Specifies what type of persistent disk should be used. The value should be a full " + "or partial URL of a disk type resource, e.g., zones/us-central1-f/disks/pd-standard. For " + "more information, see the API reference documentation for DiskTypes: " + "https://cloud.google.com/compute/docs/reference/latest/diskTypes") String getWorkerDiskType(); void setWorkerDiskType(String value); }
apache-2.0
tom1120/ninja
ninja-core/src/test/java/ninja/BootstrapTest.java
5713
/** * Copyright (C) 2012-2016 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ninja; import ninja.cache.Cache; import static org.junit.Assert.assertTrue; import ninja.utils.NinjaConstant; import ninja.utils.NinjaMode; import ninja.utils.NinjaPropertiesImpl; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.isA; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mockito; import org.mockito.runners.MockitoJUnitRunner; /** * * @author ra */ @RunWith(MockitoJUnitRunner.class) public class BootstrapTest { NinjaPropertiesImpl ninjaPropertiesImpl; @Test public void testInitializeWithAllUserSpecifiedThingsInConfDirectory() { ninjaPropertiesImpl = new NinjaPropertiesImpl(NinjaMode.test); Bootstrap bootstrap = new Bootstrap(ninjaPropertiesImpl); bootstrap.boot(); assertThat( "Ninja Boostrap process picks up user supplied conf.Ninja definition", bootstrap.getInjector().getInstance(ninja.Ninja.class), is(instanceOf(conf.Ninja.class))); assertThat( "Ninja Boostrap process picks up user supplied Guice module in conf.Module", bootstrap.getInjector().getInstance(conf.Module.DummyInterfaceForTesting.class), is(instanceOf(conf.Module.DummyClassForTesting.class))); Router router = 
bootstrap.getInjector().getInstance(Router.class); Route route = router.getRouteFor("GET", "/"); assertThat( "conf.Routes initialized properly. We get back the class we defined by the route.", route.getControllerClass(), is(instanceOf(com.example.controllers.DummyApplication.class.getClass()))); } @Test public void noUserSuppliedThingsInConfDirectory() { // since we needed to supply conf.Ninja, etc. for our other tests, we'll // test a user NOT supplying these by configuring the application base package // a bit of a hack, but will work to force NOT finding anything ninjaPropertiesImpl = Mockito.spy(new NinjaPropertiesImpl(NinjaMode.test)); Mockito.when( ninjaPropertiesImpl.get(NinjaConstant.APPLICATION_MODULES_BASE_PACKAGE)) .thenReturn("com.doesnotexist"); Bootstrap bootstrap = new Bootstrap(ninjaPropertiesImpl); bootstrap.boot(); Router router = bootstrap.getInjector().getInstance(Router.class); try { Route route = router.getRouteFor("GET", "/"); fail(); } catch (IllegalStateException e) { assertThat(e.getMessage(), containsString("routes not compiled")); } } @Test public void testInitializeWithAllUserSpecifiedThingsInShiftedConfDirectory() { ninjaPropertiesImpl = Mockito.spy(new NinjaPropertiesImpl(NinjaMode.test)); Mockito.when( ninjaPropertiesImpl.get(NinjaConstant.APPLICATION_MODULES_BASE_PACKAGE)) .thenReturn("com.example"); Bootstrap bootstrap = new Bootstrap(ninjaPropertiesImpl); bootstrap.boot(); assertThat( "Ninja Boostrap process picks up user supplied conf.Ninja definition", bootstrap.getInjector().getInstance(ninja.Ninja.class), is(instanceOf(com.example.conf.Ninja.class))); assertThat( "Ninja Boostrap process picks up user supplied Guice module in conf.Module", bootstrap.getInjector().getInstance(com.example.conf.Module.DummyInterfaceForTesting.class), is(instanceOf(com.example.conf.Module.DummyClassForTesting.class))); Router router = bootstrap.getInjector().getInstance(Router.class); Route route = router.getRouteFor("GET", "/"); assertThat( 
"conf.Routes initialized properly. We get back the class we defined by the route.", route.getControllerClass(), is(instanceOf(com.example.controllers.DummyApplication.class.getClass()))); } @Test public void frameworkModuleSkipsNinjaClassicModule() { ninjaPropertiesImpl = Mockito.spy(new NinjaPropertiesImpl(NinjaMode.test)); Mockito.when( ninjaPropertiesImpl.get(NinjaConstant.APPLICATION_MODULES_BASE_PACKAGE)) .thenReturn("com.example.frameworkmodule"); Bootstrap bootstrap = new Bootstrap(ninjaPropertiesImpl); bootstrap.boot(); try { Cache cache = bootstrap.getInjector().getInstance(Cache.class); fail("cache should not have been found"); } catch (Exception e) { assertThat(e.getMessage(), containsString("No implementation for ninja.cache.Cache was bound")); } } }
apache-2.0
bdelacretaz/jackrabbit-oak
oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/security/user/GroupTest.java
30841
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.jcr.security.user; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.TreeSet; import javax.jcr.RepositoryException; import javax.jcr.Session; import javax.jcr.UnsupportedRepositoryOperationException; import org.apache.jackrabbit.api.security.user.Authorizable; import org.apache.jackrabbit.api.security.user.AuthorizableExistsException; import org.apache.jackrabbit.api.security.user.Group; import org.apache.jackrabbit.api.security.user.User; import org.apache.jackrabbit.test.NotExecutableException; import org.apache.jackrabbit.util.Text; import org.junit.Before; import org.junit.Test; /** * Tests for the {@link Group} implementation. 
*/ public class GroupTest extends AbstractUserTest { private List<String> members = new ArrayList<String>(); @Before @Override protected void setUp() throws Exception { super.setUp(); group.addMember(userMgr.getAuthorizable(superuser.getUserID())); group.addMember(user); members.add(superuser.getUserID()); members.add(user.getID()); superuser.save(); } private static void assertTrueIsMember(Iterator<Authorizable> members, Authorizable auth) throws RepositoryException { boolean contained = false; while (members.hasNext() && !contained) { Object next = members.next(); assertTrue(next instanceof Authorizable); contained = ((Authorizable) next).getID().equals(auth.getID()); } assertTrue("The given set of members must contain '" + auth.getID() + '\'', contained); } private static void assertFalseIsMember(Iterator<Authorizable> members, Authorizable auth) throws RepositoryException { boolean contained = false; while (members.hasNext() && !contained) { Object next = members.next(); assertTrue(next instanceof Authorizable); contained = ((Authorizable) next).getID().equals(auth.getID()); } assertFalse("The given set of members must not contain '" + auth.getID() + '\'', contained); } private static void assertTrueMemberOfContainsGroup(Iterator<Group> groups, Group gr) throws RepositoryException { boolean contained = false; while (groups.hasNext() && !contained) { Object next = groups.next(); assertTrue(next instanceof Group); contained = ((Group) next).getID().equals(gr.getID()); } assertTrue("All members of a group must contain that group upon 'memberOf'.", contained); } private static void assertFalseMemberOfContainsGroup(Iterator<Group> groups, Group gr) throws RepositoryException { boolean contained = false; while (groups.hasNext() && !contained) { Object next = groups.next(); assertTrue(next instanceof Group); contained = ((Group) next).getID().equals(gr.getID()); } assertFalse("All members of a group must contain that group upon 'memberOf'.", contained); } @Test public 
void testIsGroup() throws NotExecutableException, RepositoryException { assertTrue(group.isGroup()); } @Test public void testGetID() throws NotExecutableException, RepositoryException { assertNotNull(group.getID()); assertNotNull(userMgr.getAuthorizable(group.getID()).getID()); } @Test public void testGetPrincipal() throws RepositoryException, NotExecutableException { assertNotNull(group.getPrincipal()); assertNotNull(userMgr.getAuthorizable(group.getID()).getPrincipal()); } @Test public void testGetPath() throws RepositoryException, NotExecutableException { assertNotNull(group.getPath()); assertNotNull(userMgr.getAuthorizable(group.getID()).getPath()); try { assertEquals(getNode(group, superuser).getPath(), group.getPath()); } catch (UnsupportedRepositoryOperationException e) { // ok. } } @Test public void testGetDeclaredMembers() throws NotExecutableException, RepositoryException { Iterator<Authorizable> it = group.getDeclaredMembers(); assertNotNull(it); while (it.hasNext()) { Authorizable a = it.next(); assertNotNull(a); members.remove(a.getID()); } assertTrue(members.isEmpty()); } @Test public void testGetMembers() throws NotExecutableException, RepositoryException { Iterator<Authorizable> it = group.getMembers(); assertNotNull(it); while (it.hasNext()) { assertTrue(it.next() != null); } } @Test public void testGetMembersAgainstIsMember() throws NotExecutableException, RepositoryException { Iterator<Authorizable> it = group.getMembers(); while (it.hasNext()) { Authorizable auth = it.next(); assertTrue(group.isMember(auth)); } } @Test public void testGetMembersAgainstMemberOf() throws NotExecutableException, RepositoryException { Iterator<Authorizable> it = group.getMembers(); while (it.hasNext()) { Authorizable auth = it.next(); assertTrueMemberOfContainsGroup(auth.memberOf(), group); } } @Test public void testGetDeclaredMembersAgainstDeclaredMemberOf() throws NotExecutableException, RepositoryException { Iterator<Authorizable> it = group.getDeclaredMembers(); 
while (it.hasNext()) { Authorizable auth = it.next(); assertTrueMemberOfContainsGroup(auth.declaredMemberOf(), group); } } @Test public void testGetMembersContainsDeclaredMembers() throws NotExecutableException, RepositoryException { List<String> l = new ArrayList<String>(); for (Iterator<Authorizable> it = group.getMembers(); it.hasNext(); ) { l.add(it.next().getID()); } for (Iterator<Authorizable> it = group.getDeclaredMembers(); it.hasNext(); ) { assertTrue("All declared members must also be part of the Iterator " + "returned upon getMembers()", l.contains(it.next().getID())); } } @Test public void testGetDeclaredMembersWithMemberRemoved() throws RepositoryException { String uid = user.getID(); user.remove(); superuser.save(); user = null; Iterator<Authorizable> it = group.getDeclaredMembers(); while (it.hasNext()) { Authorizable a = it.next(); assertFalse(uid.equals(a.getID())); } } @Test public void testAddMember() throws NotExecutableException, RepositoryException { User auth = getTestUser(superuser); Group newGroup = null; try { newGroup = userMgr.createGroup(createGroupId()); superuser.save(); assertFalse(newGroup.isMember(auth)); assertFalse(newGroup.removeMember(auth)); superuser.save(); assertTrue(newGroup.addMember(auth)); superuser.save(); assertTrue(newGroup.isMember(auth)); assertTrue(newGroup.isMember(userMgr.getAuthorizable(auth.getID()))); } finally { if (newGroup != null) { newGroup.removeMember(auth); newGroup.remove(); superuser.save(); } } } @Test public void testAddMembers() throws NotExecutableException, RepositoryException { User auth = getTestUser(superuser); Group newGroup = null; int size = 100; List<User> users = new ArrayList<User>(size); try { newGroup = userMgr.createGroup(createGroupId()); superuser.save(); for (int k = 0; k < size; k++) { users.add(userMgr.createUser("user_" + k, "pass_" + k)); } superuser.save(); for (User user : users) { assertTrue(newGroup.addMember(user)); } superuser.save(); for (User user : users) { 
assertTrue(newGroup.isMember(user)); } for (User user : users) { assertTrue(newGroup.removeMember(user)); } superuser.save(); for (User user : users) { assertFalse(newGroup.isMember(user)); } } finally { for (User user : users) { user.remove(); superuser.save(); } if (newGroup != null) { newGroup.removeMember(auth); newGroup.remove(); superuser.save(); } } } @Test public void testAddRemoveMember() throws NotExecutableException, RepositoryException { User auth = getTestUser(superuser); Group newGroup1 = null; Group newGroup2 = null; try { newGroup1 = userMgr.createGroup(createGroupId()); newGroup2 = userMgr.createGroup(createGroupId()); superuser.save(); assertFalse(newGroup1.isMember(auth)); assertFalse(newGroup1.removeMember(auth)); superuser.save(); assertFalse(newGroup2.isMember(auth)); assertFalse(newGroup2.removeMember(auth)); superuser.save(); assertTrue(newGroup1.addMember(auth)); superuser.save(); assertTrue(newGroup1.isMember(auth)); assertTrue(newGroup1.isMember(userMgr.getAuthorizable(auth.getID()))); assertTrue(newGroup2.addMember(auth)); superuser.save(); assertTrue(newGroup2.isMember(auth)); assertTrue(newGroup2.isMember(userMgr.getAuthorizable(auth.getID()))); assertTrue(newGroup1.removeMember(auth)); superuser.save(); assertTrue(newGroup2.removeMember(auth)); superuser.save(); assertTrue(newGroup1.addMember(auth)); superuser.save(); assertTrue(newGroup1.isMember(auth)); assertTrue(newGroup1.isMember(userMgr.getAuthorizable(auth.getID()))); assertTrue(newGroup1.removeMember(auth)); superuser.save(); } finally { if (newGroup1 != null) { newGroup1.removeMember(auth); newGroup1.remove(); superuser.save(); } if (newGroup2 != null) { newGroup2.removeMember(auth); newGroup2.remove(); superuser.save(); } } } @Test public void testIsDeclaredMember() throws RepositoryException, NotExecutableException { User auth = getTestUser(superuser); Group newGroup1 = null; Group newGroup2 = null; try { newGroup1 = userMgr.createGroup(createGroupId()); newGroup2 = 
userMgr.createGroup(createGroupId()); superuser.save(); assertFalse(newGroup1.isDeclaredMember(auth)); assertFalse(newGroup2.isDeclaredMember(auth)); assertTrue(newGroup2.addMember(auth)); superuser.save(); assertTrue(newGroup2.isDeclaredMember(auth)); assertTrue(newGroup2.isDeclaredMember(userMgr.getAuthorizable(auth.getID()))); assertTrue(newGroup1.addMember(newGroup2)); superuser.save(); assertTrue(newGroup1.isDeclaredMember(newGroup2)); assertTrue(newGroup1.isDeclaredMember(userMgr.getAuthorizable(newGroup2.getID()))); assertTrue(newGroup1.isMember(auth)); assertTrue(newGroup1.isMember(userMgr.getAuthorizable(auth.getID()))); assertFalse(newGroup1.isDeclaredMember(auth)); assertFalse(newGroup1.isDeclaredMember(userMgr.getAuthorizable(auth.getID()))); } finally { if (newGroup1 != null) { newGroup1.remove(); superuser.save(); } if (newGroup2 != null) { newGroup2.remove(); superuser.save(); } } } @Test public void testAddMemberTwice() throws NotExecutableException, RepositoryException { User auth = getTestUser(superuser); Group newGroup = null; try { newGroup = userMgr.createGroup(createGroupId()); superuser.save(); assertTrue(newGroup.addMember(auth)); superuser.save(); assertFalse(newGroup.addMember(auth)); superuser.save(); assertTrue(newGroup.isMember(auth)); } finally { if (newGroup != null) { newGroup.removeMember(auth); newGroup.remove(); superuser.save(); } } } @Test public void testAddMemberModifiesMemberOf() throws NotExecutableException, RepositoryException { User auth = getTestUser(superuser); Group newGroup = null; try { newGroup = userMgr.createGroup(createGroupId()); superuser.save(); assertFalseMemberOfContainsGroup(auth.memberOf(), newGroup); assertTrue(newGroup.addMember(auth)); superuser.save(); assertTrueMemberOfContainsGroup(auth.declaredMemberOf(), newGroup); assertTrueMemberOfContainsGroup(auth.memberOf(), newGroup); } finally { if (newGroup != null) { newGroup.removeMember(auth); newGroup.remove(); superuser.save(); } } } @Test public void 
testAddMemberModifiesGetMembers() throws NotExecutableException, RepositoryException { User auth = getTestUser(superuser); Group newGroup = null; try { newGroup = userMgr.createGroup(createGroupId()); superuser.save(); assertFalseIsMember(newGroup.getMembers(), auth); assertFalseIsMember(newGroup.getDeclaredMembers(), auth); assertTrue(newGroup.addMember(auth)); superuser.save(); assertTrueIsMember(newGroup.getMembers(), auth); assertTrueIsMember(newGroup.getDeclaredMembers(), auth); } finally { if (newGroup != null) { newGroup.removeMember(auth); newGroup.remove(); superuser.save(); } } } @Test public void testIndirectMembers() throws NotExecutableException, RepositoryException { User user = getTestUser(superuser); Group newGroup = null; Group newGroup2 = null; try { newGroup = userMgr.createGroup(createGroupId()); newGroup2 = userMgr.createGroup(createGroupId()); superuser.save(); newGroup.addMember(newGroup2); superuser.save(); assertTrue(newGroup.isMember(newGroup2)); newGroup2.addMember(user); superuser.save(); // testuser must not be declared member of 'newGroup' assertFalseIsMember(newGroup.getDeclaredMembers(), user); assertFalseMemberOfContainsGroup(user.declaredMemberOf(), newGroup); // testuser must however be member of 'newGroup' (indirect). 
assertTrueIsMember(newGroup.getMembers(), user); assertTrueMemberOfContainsGroup(user.memberOf(), newGroup); // testuser cannot be removed from 'newGroup' assertFalse(newGroup.removeMember(user)); superuser.save(); } finally { if (newGroup != null) { newGroup.removeMember(newGroup2); newGroup.remove(); superuser.save(); } if (newGroup2 != null) { newGroup2.removeMember(user); newGroup2.remove(); superuser.save(); } } } @Test public void testMembersInPrincipal() throws NotExecutableException, RepositoryException { User auth = getTestUser(superuser); Group newGroup = null; Group newGroup2 = null; try { newGroup = userMgr.createGroup(createGroupId()); newGroup2 = userMgr.createGroup(createGroupId()); superuser.save(); newGroup.addMember(newGroup2); superuser.save(); newGroup2.addMember(auth); superuser.save(); java.security.acl.Group ngPrincipal = (java.security.acl.Group) newGroup.getPrincipal(); java.security.acl.Group ng2Principal = (java.security.acl.Group) newGroup2.getPrincipal(); assertFalse(ng2Principal.isMember(ngPrincipal)); // newGroup2 must be member of newGroup's principal assertTrue(ngPrincipal.isMember(newGroup2.getPrincipal())); // testuser must be member of newGroup2's and newGroup's principal (indirect) assertTrue(ng2Principal.isMember(auth.getPrincipal())); assertTrue(ngPrincipal.isMember(auth.getPrincipal())); } finally { if (newGroup != null) { newGroup.removeMember(newGroup2); newGroup.remove(); superuser.save(); } if (newGroup2 != null) { newGroup2.removeMember(auth); newGroup2.remove(); superuser.save(); } } } @Test public void testDeeplyNestedGroups() throws NotExecutableException, RepositoryException { Set<Group> groups = new HashSet<Group>(); try { User auth = getTestUser(superuser); Group topGroup = userMgr.createGroup(createGroupId()); // Create chain of nested groups with auth member of bottom group Group bottomGroup = topGroup; for (int k = 0; k < 100; k++) { Group g = userMgr.createGroup(createGroupId()); groups.add(g); 
bottomGroup.addMember(g); bottomGroup = g; } bottomGroup.addMember(auth); // Check that every groups has exactly one member for (Group g : groups) { Iterator<Authorizable> declaredMembers = g.getDeclaredMembers(); assertTrue(declaredMembers.hasNext()); declaredMembers.next(); assertFalse(declaredMembers.hasNext()); } // Check that we get all members from the getMembers call HashSet<Group> allGroups = new HashSet<Group>(groups); for (Iterator<Authorizable> it = topGroup.getMembers(); it.hasNext(); ) { Authorizable a = it.next(); assertTrue(a.equals(auth) || allGroups.remove(a)); } assertTrue(allGroups.isEmpty()); } finally { for (Group g : groups) { g.remove(); } } } @Test public void testInheritedMembers() throws Exception { Set<Authorizable> authorizables = new HashSet<Authorizable>(); try { User testUser = userMgr.createUser(createUserId(), "pw"); authorizables.add(testUser); Group group1 = userMgr.createGroup(createGroupId()); authorizables.add(group1); Group group2 = userMgr.createGroup(createGroupId()); authorizables.add(group2); Group group3 = userMgr.createGroup(createGroupId()); group1.addMember(testUser); group2.addMember(testUser); group3.addMember(group1); group3.addMember(group2); Iterator<Authorizable> members = group3.getMembers(); while (members.hasNext()) { Authorizable a = members.next(); assertTrue(authorizables.contains(a)); assertTrue(authorizables.remove(a)); } assertTrue(authorizables.isEmpty()); } finally { for (Authorizable a : authorizables) { a.remove(); } } } @Test public void testCyclicGroups() throws AuthorizableExistsException, RepositoryException, NotExecutableException { Group group1 = null; Group group2 = null; Group group3 = null; try { group1 = userMgr.createGroup(createGroupId()); group2 = userMgr.createGroup(createGroupId()); group3 = userMgr.createGroup(createGroupId()); assertTrue(group1.addMember(group2)); superuser.save(); assertTrue(group2.addMember(group3)); superuser.save(); assertFalse(group3.addMember(group1)); 
superuser.save(); } finally { if (group1 != null) group1.remove(); if (group2 != null) group2.remove(); if (group3 != null) group3.remove(); } } @Test public void testCyclicGroups2() throws AuthorizableExistsException, RepositoryException, NotExecutableException { Group group1 = null; Group group2 = null; Group group3 = null; try { group1 = userMgr.createGroup(createGroupId()); group2 = userMgr.createGroup(createGroupId()); group3 = userMgr.createGroup(createGroupId()); assertTrue(group1.addMember(group2)); assertTrue(group2.addMember(group3)); assertFalse("Cyclic group membership must be detected.", group3.addMember(group1)); } catch (RepositoryException e) { // success } finally { if (group1 != null) group1.remove(); if (group2 != null) group2.remove(); if (group3 != null) group3.remove(); } } @Test public void testRemoveMemberTwice() throws NotExecutableException, RepositoryException { User auth = getTestUser(superuser); Group newGroup = null; try { newGroup = userMgr.createGroup(createGroupId()); superuser.save(); assertTrue(newGroup.addMember(auth)); superuser.save(); assertTrue(newGroup.removeMember(userMgr.getAuthorizable(auth.getID()))); superuser.save(); assertFalse(newGroup.removeMember(auth)); superuser.save(); } finally { if (newGroup != null) { newGroup.remove(); superuser.save(); } } } @Test public void testAddItselfAsMember() throws RepositoryException, NotExecutableException { Group newGroup = null; try { newGroup = userMgr.createGroup(createGroupId()); superuser.save(); assertFalse(newGroup.addMember(newGroup)); superuser.save(); newGroup.removeMember(newGroup); superuser.save(); } finally { if (newGroup != null) { newGroup.remove(); superuser.save(); } } } @Test public void testRemoveGroupIfMemberExist() throws RepositoryException, NotExecutableException { User auth = getTestUser(superuser); String newGroupId = null; try { Group newGroup = userMgr.createGroup(createGroupId()); superuser.save(); newGroupId = newGroup.getID(); 
assertTrue(newGroup.addMember(auth)); newGroup.remove(); superuser.save(); } finally { Group gr = (Group) userMgr.getAuthorizable(newGroupId); if (gr != null) { gr.removeMember(auth); gr.remove(); superuser.save(); } } } @Test public void testRemoveGroupClearsMembership() throws NotExecutableException, RepositoryException { User auth = getTestUser(superuser); Group newGroup = null; String groupId; try { newGroup = userMgr.createGroup(createGroupId()); groupId = newGroup.getID(); superuser.save(); assertTrue(newGroup.addMember(auth)); superuser.save(); boolean isMember = false; Iterator<Group> it = auth.declaredMemberOf(); while (it.hasNext() && !isMember) { isMember = groupId.equals(it.next().getID()); } assertTrue(isMember); } finally { if (newGroup != null) { newGroup.remove(); superuser.save(); } } Iterator<Group> it = auth.declaredMemberOf(); while (it.hasNext()) { assertFalse(groupId.equals(it.next().getID())); } it = auth.memberOf(); while (it.hasNext()) { assertFalse(groupId.equals(it.next().getID())); } } @Test public void testMoveUserToOtherGroup() throws Exception { User user1 = userMgr.createUser(createUserId(), "p"); User user2 = userMgr.createUser(createUserId(), "p"); Group grp1 = userMgr.createGroup(createGroupId()); Group grp2 = userMgr.createGroup(createGroupId()); grp1.addMember(user1); grp1.addMember(user2); superuser.save(); checkDeclaredMembers(grp1, user1.getID(), user2.getID()); checkDeclaredMembers(grp2); checkDeclaredMemberOf(user1, grp1.getID()); checkDeclaredMemberOf(user2, grp1.getID()); grp1.removeMember(user1); superuser.save(); checkDeclaredMembers(grp1, user2.getID()); checkDeclaredMembers(grp2); checkDeclaredMemberOf(user1); checkDeclaredMemberOf(user2, grp1.getID()); grp2.addMember(user1); superuser.save(); checkDeclaredMembers(grp1, user2.getID()); checkDeclaredMembers(grp2, user1.getID()); checkDeclaredMemberOf(user1, grp2.getID()); checkDeclaredMemberOf(user2, grp1.getID()); } @Test public void testRemoveMembership() throws 
RepositoryException { String grId2 = createGroupId(); Group gr2 = null; try { gr2 = userMgr.createGroup(grId2); gr2.addMember(user); superuser.save(); Iterator<Group> groups = user.declaredMemberOf(); while (groups.hasNext()) { Group group = groups.next(); group.removeMember(user); superuser.save(); } assertFalse(userMgr.getAuthorizable(group.getID(), Group.class).isDeclaredMember(user)); assertFalse(userMgr.getAuthorizable(grId2, Group.class).isDeclaredMember(user)); groups = user.declaredMemberOf(); while (groups.hasNext()) { String id = groups.next().getID(); assertFalse(group.getID().equals(id)); assertFalse(grId2.equals(id)); } } finally { if (gr2 != null) { gr2.remove(); superuser.save(); } } } @Test public void testRemoveMembershipWithDifferentSessions() throws Exception { String grId2 = createGroupId(); Group gr2 = null; Session s2 = null; Session s3 = null; try { gr2 = userMgr.createGroup(grId2); gr2.addMember(user); superuser.save(); s2 = getHelper().getReadWriteSession(); Authorizable u2 = getUserManager(s2).getAuthorizable(user.getID()); Iterator<Group> groups = u2.declaredMemberOf(); while (groups.hasNext()) { Group group = groups.next(); group.removeMember(u2); } s2.save(); s3 = getHelper().getReadWriteSession(); Authorizable u3 = getUserManager(s3).getAuthorizable(user.getID()); assertFalse(getUserManager(s3).getAuthorizable(group.getID(), Group.class).isDeclaredMember(u3)); assertFalse(getUserManager(s3).getAuthorizable(grId2, Group.class).isDeclaredMember(u3)); groups = u3.declaredMemberOf(); while (groups.hasNext()) { String id = groups.next().getID(); assertFalse(group.getID().equals(id)); assertFalse(grId2.equals(id)); } } finally { if (gr2 != null) { gr2.remove(); superuser.save(); } if (s2 != null) { s2.logout(); } if (s3 != null) { s3.logout(); } } } private void checkDeclaredMembers(Group grp, String ... 
ids) throws RepositoryException { TreeSet<String> members = new TreeSet<String>(); Iterator<Authorizable> iter = grp.getMembers(); while (iter.hasNext()) { members.add(iter.next().getID()); } Arrays.sort(ids); assertEquals( "Group members", Text.implode(ids, ","), Text.implode(members.toArray(new String[members.size()]), ",")); } private void checkDeclaredMemberOf(Authorizable auth, String ... ids) throws RepositoryException { TreeSet<String> members = new TreeSet<String>(); Iterator<Group> iter = auth.declaredMemberOf(); while (iter.hasNext()) { members.add(iter.next().getID()); } Arrays.sort(ids); assertEquals( "Group memberships", Text.implode(ids, ","), Text.implode(members.toArray(new String[members.size()]), ",")); } }
apache-2.0
hejunbinlan/MVPAndroidBootstrap
app/src/main/java/com/richardradics/cleanaa/interactor/BaseInteractor.java
554
package com.richardradics.cleanaa.interactor; import com.richardradics.cleanaa.repository.CleanRepository; import com.richardradics.cleanaa.repository.api.OpenWeatherClient; import com.richardradics.cleanaa.app.CleanDatabase; import org.androidannotations.annotations.Bean; import org.androidannotations.annotations.EBean; /** * Created by radicsrichard on 15. 05. 13.. */ @EBean public class BaseInteractor { @Bean protected CleanDatabase cleanDatabase; @Bean(OpenWeatherClient.class) protected CleanRepository cleanRepository; }
apache-2.0
flofreud/aws-sdk-java
aws-java-sdk-datapipeline/src/main/java/com/amazonaws/services/datapipeline/model/transform/RemoveTagsRequestMarshaller.java
3740
/* * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights * Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.datapipeline.model.transform; import java.io.ByteArrayInputStream; import java.util.Collections; import java.util.Map; import java.util.List; import java.util.regex.Pattern; import com.amazonaws.AmazonClientException; import com.amazonaws.Request; import com.amazonaws.DefaultRequest; import com.amazonaws.http.HttpMethodName; import com.amazonaws.services.datapipeline.model.*; import com.amazonaws.transform.Marshaller; import com.amazonaws.util.BinaryUtils; import com.amazonaws.util.StringUtils; import com.amazonaws.util.IdempotentUtils; import com.amazonaws.util.StringInputStream; import com.amazonaws.protocol.json.*; /** * RemoveTagsRequest Marshaller */ public class RemoveTagsRequestMarshaller implements Marshaller<Request<RemoveTagsRequest>, RemoveTagsRequest> { private final SdkJsonProtocolFactory protocolFactory; public RemoveTagsRequestMarshaller(SdkJsonProtocolFactory protocolFactory) { this.protocolFactory = protocolFactory; } public Request<RemoveTagsRequest> marshall( RemoveTagsRequest removeTagsRequest) { if (removeTagsRequest == null) { throw new AmazonClientException( "Invalid argument passed to marshall(...)"); } Request<RemoveTagsRequest> request = new DefaultRequest<RemoveTagsRequest>( removeTagsRequest, "DataPipeline"); request.addHeader("X-Amz-Target", "DataPipeline.RemoveTags"); request.setHttpMethod(HttpMethodName.POST); request.setResourcePath(""); try 
{ final StructuredJsonGenerator jsonGenerator = protocolFactory .createGenerator(); jsonGenerator.writeStartObject(); if (removeTagsRequest.getPipelineId() != null) { jsonGenerator.writeFieldName("pipelineId").writeValue( removeTagsRequest.getPipelineId()); } com.amazonaws.internal.SdkInternalList<String> tagKeysList = (com.amazonaws.internal.SdkInternalList<String>) removeTagsRequest .getTagKeys(); if (!tagKeysList.isEmpty() || !tagKeysList.isAutoConstruct()) { jsonGenerator.writeFieldName("tagKeys"); jsonGenerator.writeStartArray(); for (String tagKeysListValue : tagKeysList) { if (tagKeysListValue != null) { jsonGenerator.writeValue(tagKeysListValue); } } jsonGenerator.writeEndArray(); } jsonGenerator.writeEndObject(); byte[] content = jsonGenerator.getBytes(); request.setContent(new ByteArrayInputStream(content)); request.addHeader("Content-Length", Integer.toString(content.length)); request.addHeader("Content-Type", jsonGenerator.getContentType()); } catch (Throwable t) { throw new AmazonClientException( "Unable to marshall request to JSON: " + t.getMessage(), t); } return request; } }
apache-2.0
skoulouzis/vlet-1.5.0
source/core/nl.uva.vlet.glite.lfc/src/nl/uva/vlet/glite/lfc/internal/CnsChmodRequest.java
2149
/* * Initial development of the original code was made for the * g-Eclipse project founded by European Union * project number: FP6-IST-034327 http://www.geclipse.eu/ * * Contributors: * Mateusz Pabis (PSNC) - initial API and implementation * Piter T. de boer - Refactoring to standalone API and bugfixing. * Spiros Koulouzis - Refactoring to standalone API and bugfixing. */ package nl.uva.vlet.glite.lfc.internal; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import nl.uva.vlet.glite.lfc.IOUtil; import nl.uva.vlet.glite.lfc.LFCServer; /** * chmod Request. * * @author S. Koulouzis */ public class CnsChmodRequest { private int uid = 0; private int gid = 0; private long cwd = 0; private String path = null; private int mode; /** * Creates request for chmod. */ public CnsChmodRequest(final String path, int mode) { this.path = path; this.uid = 0; this.gid = 0; this.cwd = 0; this.mode = mode; } public CnsChmodResponse sendTo(final DataOutputStream output, final DataInputStream input) throws IOException { LFCServer .staticLogIOMessage("Sending chmod request for: " + this.path); // Build request header CnsMessage msg = CnsMessage.createSendMessage(CnsConstants.CNS_MAGIC, CnsConstants.CNS_CHMOD); DataOutputStream dataOut = msg.createBodyDataOutput(4096); mode &= 07777; //Build request body dataOut.writeInt(this.uid); // +4 dataOut.writeInt(this.gid); // +4 dataOut.writeLong(this.cwd); // +8 IOUtil.writeString(dataOut, path); // +1+length() dataOut.writeInt(this.mode); // no need to flush databuffer not close it. // finalize and send ! int numSend = msg.sendTo(output); output.flush(); // sync msg.dispose(); CnsChmodResponse result = new CnsChmodResponse(); result.readFrom( input ); return result; } }
apache-2.0
huitseeker/nd4j
nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/api/ndarray/TestJSONC.java
4288
package org.nd4j.linalg.api.ndarray; import org.junit.Ignore; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.nd4j.linalg.BaseNd4jTest; import org.nd4j.linalg.factory.Nd4j; import org.nd4j.linalg.factory.Nd4jBackend; import org.nd4j.linalg.io.Assert; import org.nd4j.linalg.ops.transforms.Transforms; import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Paths; /** * Created by susaneraly on 6/18/16. */ @RunWith(Parameterized.class) @Ignore public class TestJSONC extends BaseNd4jTest { public TestJSONC(Nd4jBackend backend) { super(backend); } @Test @Ignore public void TestReadWrite() { INDArray origArr = Nd4j.rand('c', 10, 10).muli(100); //since we write only two decimal points.. NdArrayJSONWriter.write(origArr, "someArr.json"); NdArrayJSONReader jsonReader = new NdArrayJSONReader(); INDArray readBack = jsonReader.read(new File("someArr.json")); System.out.println("========================================================================="); System.out.println(origArr); System.out.println("========================================================================="); System.out.println(readBack); Assert.isTrue(Transforms.abs(origArr.subi(readBack)).maxNumber().doubleValue() < 0.09); try { Files.delete(Paths.get("someArr.json")); } catch (IOException e) { e.printStackTrace(); } } @Test @Ignore public void TestReadWriteSimple() { INDArray origArr = Nd4j.rand(1, 1).muli(100); //since we write only two decimal points.. 
NdArrayJSONWriter.write(origArr, "someArr.json"); NdArrayJSONReader jsonReader = new NdArrayJSONReader(); INDArray readBack = jsonReader.read(new File("someArr.json")); System.out.println("========================================================================="); System.out.println(origArr); System.out.println("========================================================================="); System.out.println(readBack); Assert.isTrue(Transforms.abs(origArr.subi(readBack)).maxNumber().doubleValue() < 0.09); try { Files.delete(Paths.get("someArr.json")); } catch (IOException e) { e.printStackTrace(); } } @Test @Ignore public void TestReadWriteNd() { INDArray origArr = Nd4j.rand(13, 2, 11, 3, 7, 19).muli(100); //since we write only two decimal points.. NdArrayJSONWriter.write(origArr, "someArr.json"); NdArrayJSONReader jsonReader = new NdArrayJSONReader(); INDArray readBack = jsonReader.read(new File("someArr.json")); System.out.println("========================================================================="); System.out.println(origArr); System.out.println("========================================================================="); System.out.println(readBack); Assert.isTrue(Transforms.abs(origArr.subi(readBack)).maxNumber().doubleValue() < 0.09); try { Files.delete(Paths.get("someArr.json")); } catch (IOException e) { e.printStackTrace(); } } @Test public void TestWierdShape() { INDArray origArr = Nd4j.rand(1, 1, 2, 1, 1).muli(100); //since we write only two decimal points.. 
NdArrayJSONWriter.write(origArr, "someArr.json"); NdArrayJSONReader jsonReader = new NdArrayJSONReader(); INDArray readBack = jsonReader.read(new File("someArr.json")); System.out.println("========================================================================="); System.out.println(origArr); System.out.println("========================================================================="); System.out.println(readBack); Assert.isTrue(Transforms.abs(origArr.subi(readBack)).maxNumber().doubleValue() < 0.09); try { Files.delete(Paths.get("someArr.json")); } catch (IOException e) { e.printStackTrace(); } } @Override public char ordering() { return 'c'; } }
apache-2.0
ChinaQuants/Strata
modules/math/src/main/java/com/opengamma/strata/math/impl/rootfinding/newton/NewtonDefaultUpdateFunction.java
854
/** * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.strata.math.impl.rootfinding.newton; import java.util.function.Function; import com.opengamma.strata.collect.ArgChecker; import com.opengamma.strata.collect.array.DoubleArray; import com.opengamma.strata.collect.array.DoubleMatrix; /** * */ public class NewtonDefaultUpdateFunction implements NewtonRootFinderMatrixUpdateFunction { @Override public DoubleMatrix getUpdatedMatrix( Function<DoubleArray, DoubleMatrix> jacobianFunction, DoubleArray x, DoubleArray deltaX, DoubleArray deltaY, DoubleMatrix matrix) { ArgChecker.notNull(jacobianFunction, "jacobianFunction"); ArgChecker.notNull(x, "x"); return jacobianFunction.apply(x); } }
apache-2.0
Apeksi1990/asemenov
chapter_004/src/test/java/ru/asemenov/SimpleGenerator/package-info.java
99
/** * @author Semenov Alexey * @version 1 * @since 0.1 */ package ru.asemenov.SimpleGenerator;
apache-2.0
aljoscha/flink
flink-streaming-java/src/main/java/org/apache/flink/streaming/api/environment/CheckpointConfig.java
28501
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.streaming.api.environment; import org.apache.flink.annotation.Experimental; import org.apache.flink.annotation.Public; import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.api.common.JobStatus; import org.apache.flink.configuration.ReadableConfig; import org.apache.flink.streaming.api.CheckpointingMode; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static java.util.Objects.requireNonNull; import static org.apache.flink.runtime.checkpoint.CheckpointFailureManager.UNLIMITED_TOLERABLE_FAILURE_NUMBER; import static org.apache.flink.runtime.jobgraph.tasks.CheckpointCoordinatorConfiguration.MINIMAL_CHECKPOINT_TIME; import static org.apache.flink.util.Preconditions.checkNotNull; /** Configuration that captures all checkpointing related settings. */ @Public public class CheckpointConfig implements java.io.Serializable { private static final long serialVersionUID = -750378776078908147L; private static final Logger LOG = LoggerFactory.getLogger(CheckpointConfig.class); /** The default checkpoint mode: exactly once. 
*/ public static final CheckpointingMode DEFAULT_MODE = CheckpointingMode.EXACTLY_ONCE; /** The default timeout of a checkpoint attempt: 10 minutes. */ public static final long DEFAULT_TIMEOUT = 10 * 60 * 1000; /** The default minimum pause to be made between checkpoints: none. */ public static final long DEFAULT_MIN_PAUSE_BETWEEN_CHECKPOINTS = 0; /** The default limit of concurrently happening checkpoints: one. */ public static final int DEFAULT_MAX_CONCURRENT_CHECKPOINTS = 1; public static final int UNDEFINED_TOLERABLE_CHECKPOINT_NUMBER = -1; // ------------------------------------------------------------------------ /** Checkpointing mode (exactly-once vs. at-least-once). */ private CheckpointingMode checkpointingMode = DEFAULT_MODE; /** Periodic checkpoint triggering interval. */ private long checkpointInterval = -1; // disabled /** Maximum time checkpoint may take before being discarded. */ private long checkpointTimeout = DEFAULT_TIMEOUT; /** Minimal pause between checkpointing attempts. */ private long minPauseBetweenCheckpoints = DEFAULT_MIN_PAUSE_BETWEEN_CHECKPOINTS; /** Maximum number of checkpoint attempts in progress at the same time. */ private int maxConcurrentCheckpoints = DEFAULT_MAX_CONCURRENT_CHECKPOINTS; /** Flag to force checkpointing in iterative jobs. */ private boolean forceCheckpointing; /** Flag to force checkpointing in iterative jobs. */ private boolean forceUnalignedCheckpoints; /** Flag to enable unaligned checkpoints. */ private boolean unalignedCheckpointsEnabled; private long alignmentTimeout = ExecutionCheckpointingOptions.ALIGNMENT_TIMEOUT.defaultValue().toMillis(); /** Flag to enable approximate local recovery. */ private boolean approximateLocalRecovery; /** Cleanup behaviour for persistent checkpoints. */ private ExternalizedCheckpointCleanup externalizedCheckpointCleanup; /** * Task would not fail if there is an error in their checkpointing. 
* * <p>{@link #tolerableCheckpointFailureNumber} would always overrule this deprecated field if * they have conflicts. * * @deprecated Use {@link #tolerableCheckpointFailureNumber}. */ @Deprecated private boolean failOnCheckpointingErrors = true; /** Determines if a job will fallback to checkpoint when there is a more recent savepoint. * */ private boolean preferCheckpointForRecovery = false; /** * Determines the threshold that we tolerance declined checkpoint failure number. The default * value is -1 meaning undetermined and not set via {@link * #setTolerableCheckpointFailureNumber(int)}. */ private int tolerableCheckpointFailureNumber = UNDEFINED_TOLERABLE_CHECKPOINT_NUMBER; /** * Creates a deep copy of the provided {@link CheckpointConfig}. * * @param checkpointConfig the config to copy. */ public CheckpointConfig(final CheckpointConfig checkpointConfig) { checkNotNull(checkpointConfig); this.checkpointInterval = checkpointConfig.checkpointInterval; this.checkpointingMode = checkpointConfig.checkpointingMode; this.checkpointTimeout = checkpointConfig.checkpointTimeout; this.maxConcurrentCheckpoints = checkpointConfig.maxConcurrentCheckpoints; this.minPauseBetweenCheckpoints = checkpointConfig.minPauseBetweenCheckpoints; this.preferCheckpointForRecovery = checkpointConfig.preferCheckpointForRecovery; this.tolerableCheckpointFailureNumber = checkpointConfig.tolerableCheckpointFailureNumber; this.unalignedCheckpointsEnabled = checkpointConfig.isUnalignedCheckpointsEnabled(); this.alignmentTimeout = checkpointConfig.alignmentTimeout; this.approximateLocalRecovery = checkpointConfig.isApproximateLocalRecoveryEnabled(); this.externalizedCheckpointCleanup = checkpointConfig.externalizedCheckpointCleanup; this.forceCheckpointing = checkpointConfig.forceCheckpointing; this.forceUnalignedCheckpoints = checkpointConfig.forceUnalignedCheckpoints; this.tolerableCheckpointFailureNumber = checkpointConfig.tolerableCheckpointFailureNumber; } public CheckpointConfig() {} // 
------------------------------------------------------------------------ /** Disables checkpointing. */ public void disableCheckpointing() { this.checkpointInterval = -1; } /** * Checks whether checkpointing is enabled. * * @return True if checkpointing is enables, false otherwise. */ public boolean isCheckpointingEnabled() { return checkpointInterval > 0; } /** * Gets the checkpointing mode (exactly-once vs. at-least-once). * * @return The checkpointing mode. */ public CheckpointingMode getCheckpointingMode() { return checkpointingMode; } /** * Sets the checkpointing mode (exactly-once vs. at-least-once). * * @param checkpointingMode The checkpointing mode. */ public void setCheckpointingMode(CheckpointingMode checkpointingMode) { this.checkpointingMode = requireNonNull(checkpointingMode); } /** * Gets the interval in which checkpoints are periodically scheduled. * * <p>This setting defines the base interval. Checkpoint triggering may be delayed by the * settings {@link #getMaxConcurrentCheckpoints()} and {@link #getMinPauseBetweenCheckpoints()}. * * @return The checkpoint interval, in milliseconds. */ public long getCheckpointInterval() { return checkpointInterval; } /** * Sets the interval in which checkpoints are periodically scheduled. * * <p>This setting defines the base interval. Checkpoint triggering may be delayed by the * settings {@link #setMaxConcurrentCheckpoints(int)} and {@link * #setMinPauseBetweenCheckpoints(long)}. * * @param checkpointInterval The checkpoint interval, in milliseconds. */ public void setCheckpointInterval(long checkpointInterval) { if (checkpointInterval < MINIMAL_CHECKPOINT_TIME) { throw new IllegalArgumentException( String.format( "Checkpoint interval must be larger than or equal to %s ms", MINIMAL_CHECKPOINT_TIME)); } this.checkpointInterval = checkpointInterval; } /** * Gets the maximum time that a checkpoint may take before being discarded. * * @return The checkpoint timeout, in milliseconds. 
*/ public long getCheckpointTimeout() { return checkpointTimeout; } /** * Sets the maximum time that a checkpoint may take before being discarded. * * @param checkpointTimeout The checkpoint timeout, in milliseconds. */ public void setCheckpointTimeout(long checkpointTimeout) { if (checkpointTimeout < MINIMAL_CHECKPOINT_TIME) { throw new IllegalArgumentException( String.format( "Checkpoint timeout must be larger than or equal to %s ms", MINIMAL_CHECKPOINT_TIME)); } this.checkpointTimeout = checkpointTimeout; } /** * Gets the minimal pause between checkpointing attempts. This setting defines how soon the * checkpoint coordinator may trigger another checkpoint after it becomes possible to trigger * another checkpoint with respect to the maximum number of concurrent checkpoints (see {@link * #getMaxConcurrentCheckpoints()}). * * @return The minimal pause before the next checkpoint is triggered. */ public long getMinPauseBetweenCheckpoints() { return minPauseBetweenCheckpoints; } /** * Sets the minimal pause between checkpointing attempts. This setting defines how soon the * checkpoint coordinator may trigger another checkpoint after it becomes possible to trigger * another checkpoint with respect to the maximum number of concurrent checkpoints (see {@link * #setMaxConcurrentCheckpoints(int)}). * * <p>If the maximum number of concurrent checkpoints is set to one, this setting makes * effectively sure that a minimum amount of time passes where no checkpoint is in progress at * all. * * @param minPauseBetweenCheckpoints The minimal pause before the next checkpoint is triggered. */ public void setMinPauseBetweenCheckpoints(long minPauseBetweenCheckpoints) { if (minPauseBetweenCheckpoints < 0) { throw new IllegalArgumentException("Pause value must be zero or positive"); } this.minPauseBetweenCheckpoints = minPauseBetweenCheckpoints; } /** * Gets the maximum number of checkpoint attempts that may be in progress at the same time. 
If * this value is <i>n</i>, then no checkpoints will be triggered while <i>n</i> checkpoint * attempts are currently in flight. For the next checkpoint to be triggered, one checkpoint * attempt would need to finish or expire. * * @return The maximum number of concurrent checkpoint attempts. */ public int getMaxConcurrentCheckpoints() { return maxConcurrentCheckpoints; } /** * Sets the maximum number of checkpoint attempts that may be in progress at the same time. If * this value is <i>n</i>, then no checkpoints will be triggered while <i>n</i> checkpoint * attempts are currently in flight. For the next checkpoint to be triggered, one checkpoint * attempt would need to finish or expire. * * @param maxConcurrentCheckpoints The maximum number of concurrent checkpoint attempts. */ public void setMaxConcurrentCheckpoints(int maxConcurrentCheckpoints) { if (maxConcurrentCheckpoints < 1) { throw new IllegalArgumentException( "The maximum number of concurrent attempts must be at least one."); } this.maxConcurrentCheckpoints = maxConcurrentCheckpoints; } /** * Checks whether checkpointing is forced, despite currently non-checkpointable iteration * feedback. * * @return True, if checkpointing is forced, false otherwise. * @deprecated This will be removed once iterations properly participate in checkpointing. */ @Deprecated @PublicEvolving public boolean isForceCheckpointing() { return forceCheckpointing; } /** * Checks whether checkpointing is forced, despite currently non-checkpointable iteration * feedback. * * @param forceCheckpointing The flag to force checkpointing. * @deprecated This will be removed once iterations properly participate in checkpointing. */ @Deprecated @PublicEvolving public void setForceCheckpointing(boolean forceCheckpointing) { this.forceCheckpointing = forceCheckpointing; } /** * Checks whether Unaligned Checkpoints are forced, despite iteration feedback. * * @return True, if Unaligned Checkpoints are forced, false otherwise. 
*/ @PublicEvolving public boolean isForceUnalignedCheckpoints() { return forceUnalignedCheckpoints; } /** * Checks whether Unaligned Checkpoints are forced, despite currently non-checkpointable * iteration feedback. * * @param forceUnalignedCheckpoints The flag to force checkpointing. */ @PublicEvolving public void setForceUnalignedCheckpoints(boolean forceUnalignedCheckpoints) { this.forceUnalignedCheckpoints = forceUnalignedCheckpoints; } /** * This determines the behaviour when meeting checkpoint errors. If this returns true, which is * equivalent to get tolerableCheckpointFailureNumber as zero, job manager would fail the whole * job once it received a decline checkpoint message. If this returns false, which is equivalent * to get tolerableCheckpointFailureNumber as the maximum of integer (means unlimited), job * manager would not fail the whole job no matter how many declined checkpoints it received. * * @deprecated Use {@link #getTolerableCheckpointFailureNumber()}. */ @Deprecated public boolean isFailOnCheckpointingErrors() { return failOnCheckpointingErrors; } /** * Sets the expected behaviour for tasks in case that they encounter an error when * checkpointing. If this is set as true, which is equivalent to set * tolerableCheckpointFailureNumber as zero, job manager would fail the whole job once it * received a decline checkpoint message. If this is set as false, which is equivalent to set * tolerableCheckpointFailureNumber as the maximum of integer (means unlimited), job manager * would not fail the whole job no matter how many declined checkpoints it received. * * <p>{@link #setTolerableCheckpointFailureNumber(int)} would always overrule this deprecated * method if they have conflicts. * * @deprecated Use {@link #setTolerableCheckpointFailureNumber(int)}. 
*/ @Deprecated public void setFailOnCheckpointingErrors(boolean failOnCheckpointingErrors) { if (tolerableCheckpointFailureNumber != UNDEFINED_TOLERABLE_CHECKPOINT_NUMBER) { LOG.warn( "Since tolerableCheckpointFailureNumber has been configured as {}, deprecated #setFailOnCheckpointingErrors(boolean) " + "method would not take any effect and please use #setTolerableCheckpointFailureNumber(int) method to " + "determine your expected behaviour when checkpoint errors on task side.", tolerableCheckpointFailureNumber); return; } this.failOnCheckpointingErrors = failOnCheckpointingErrors; if (failOnCheckpointingErrors) { this.tolerableCheckpointFailureNumber = 0; } else { this.tolerableCheckpointFailureNumber = UNLIMITED_TOLERABLE_FAILURE_NUMBER; } } /** * Get the tolerable checkpoint failure number which used by the checkpoint failure manager to * determine when we need to fail the job. * * <p>If the {@link #tolerableCheckpointFailureNumber} has not been configured, this method * would return 0 which means the checkpoint failure manager would not tolerate any declined * checkpoint failure. */ public int getTolerableCheckpointFailureNumber() { if (tolerableCheckpointFailureNumber == UNDEFINED_TOLERABLE_CHECKPOINT_NUMBER) { return 0; } return tolerableCheckpointFailureNumber; } /** * Set the tolerable checkpoint failure number, the default value is 0 that means we do not * tolerance any checkpoint failure. */ public void setTolerableCheckpointFailureNumber(int tolerableCheckpointFailureNumber) { if (tolerableCheckpointFailureNumber < 0) { throw new IllegalArgumentException( "The tolerable failure checkpoint number must be non-negative."); } this.tolerableCheckpointFailureNumber = tolerableCheckpointFailureNumber; } /** * Enables checkpoints to be persisted externally. 
* * <p>Externalized checkpoints write their meta data out to persistent storage and are * <strong>not</strong> automatically cleaned up when the owning job fails or is suspended * (terminating with job status {@link JobStatus#FAILED} or {@link JobStatus#SUSPENDED}). In * this case, you have to manually clean up the checkpoint state, both the meta data and actual * program state. * * <p>The {@link ExternalizedCheckpointCleanup} mode defines how an externalized checkpoint * should be cleaned up on job cancellation. If you choose to retain externalized checkpoints on * cancellation you have you handle checkpoint clean up manually when you cancel the job as well * (terminating with job status {@link JobStatus#CANCELED}). * * <p>The target directory for externalized checkpoints is configured via {@link * org.apache.flink.configuration.CheckpointingOptions#CHECKPOINTS_DIRECTORY}. * * @param cleanupMode Externalized checkpoint cleanup behaviour. */ @PublicEvolving public void enableExternalizedCheckpoints(ExternalizedCheckpointCleanup cleanupMode) { this.externalizedCheckpointCleanup = checkNotNull(cleanupMode); } /** * Returns whether checkpoints should be persisted externally. * * @return <code>true</code> if checkpoints should be externalized. */ @PublicEvolving public boolean isExternalizedCheckpointsEnabled() { return externalizedCheckpointCleanup != null; } /** * Returns whether a job recovery should fallback to checkpoint when there is a more recent * savepoint. * * @return <code>true</code> if a job recovery should fallback to checkpoint. * @deprecated Don't activate prefer checkpoints for recovery because it can lead to data loss * and duplicate output. This option will soon be removed. See <a * href="https://issues.apache.org/jira/browse/FLINK-20427">FLINK-20427</a> for more * information. 
*/ @PublicEvolving @Deprecated public boolean isPreferCheckpointForRecovery() { return preferCheckpointForRecovery; } /** * Sets whether a job recovery should fallback to checkpoint when there is a more recent * savepoint. * * @deprecated Don't activate prefer checkpoints for recovery because it can lead to data loss * and duplicate output. This option will soon be removed. See <a * href="https://issues.apache.org/jira/browse/FLINK-20427">FLINK-20427</a> for more * information. */ @PublicEvolving @Deprecated public void setPreferCheckpointForRecovery(boolean preferCheckpointForRecovery) { this.preferCheckpointForRecovery = preferCheckpointForRecovery; } /** * Enables unaligned checkpoints, which greatly reduce checkpointing times under backpressure. * * <p>Unaligned checkpoints contain data stored in buffers as part of the checkpoint state, * which allows checkpoint barriers to overtake these buffers. Thus, the checkpoint duration * becomes independent of the current throughput as checkpoint barriers are effectively not * embedded into the stream of data anymore. * * <p>Unaligned checkpoints can only be enabled if {@link #checkpointingMode} is {@link * CheckpointingMode#EXACTLY_ONCE}. * * @param enabled Flag to indicate whether unaligned are enabled. */ @PublicEvolving public void enableUnalignedCheckpoints(boolean enabled) { unalignedCheckpointsEnabled = enabled; } /** * Enables unaligned checkpoints, which greatly reduce checkpointing times under backpressure. * * <p>Unaligned checkpoints contain data stored in buffers as part of the checkpoint state, * which allows checkpoint barriers to overtake these buffers. Thus, the checkpoint duration * becomes independent of the current throughput as checkpoint barriers are effectively not * embedded into the stream of data anymore. * * <p>Unaligned checkpoints can only be enabled if {@link #checkpointingMode} is {@link * CheckpointingMode#EXACTLY_ONCE}. 
*/ @PublicEvolving public void enableUnalignedCheckpoints() { enableUnalignedCheckpoints(true); } /** * Returns whether unaligned checkpoints are enabled. * * @return <code>true</code> if unaligned checkpoints are enabled. */ @PublicEvolving public boolean isUnalignedCheckpointsEnabled() { return unalignedCheckpointsEnabled; } /** * Only relevant if {@link #unalignedCheckpointsEnabled} is enabled. * * <p>If {@link #alignmentTimeout} has value equal to <code>0</code>, checkpoints will always * start unaligned. * * <p>If {@link #alignmentTimeout} has value greater then <code>0</code>, checkpoints will start * aligned. If during checkpointing, checkpoint start delay exceeds this {@link * #alignmentTimeout}, alignment will timeout and checkpoint will start working as unaligned * checkpoint. */ @PublicEvolving public void setAlignmentTimeout(long alignmentTimeout) { this.alignmentTimeout = alignmentTimeout; } /** * @return value of alignment timeout, as configured via {@link #setAlignmentTimeout(long)} or * {@link ExecutionCheckpointingOptions#ALIGNMENT_TIMEOUT}. */ @PublicEvolving public long getAlignmentTimeout() { return alignmentTimeout; } /** * Returns whether approximate local recovery is enabled. * * @return <code>true</code> if approximate local recovery is enabled. */ @Experimental public boolean isApproximateLocalRecoveryEnabled() { return approximateLocalRecovery; } /** * Enables the approximate local recovery mode. * * <p>In this recovery mode, when a task fails, the entire downstream of the tasks (including * the failed task) restart. * * <p>Notice that 1. Approximate recovery may lead to data loss. The amount of data which leads * the failed task from the state of the last completed checkpoint to the state when the task * fails is lost. 2. In the next version, we will support restarting the set of failed set of * tasks only. In this version, we only support downstream restarts when a task fails. 3. It is * only an internal feature for now. 
* * @param enabled Flag to indicate whether approximate local recovery is enabled . */ @Experimental public void enableApproximateLocalRecovery(boolean enabled) { approximateLocalRecovery = enabled; } /** * Returns the cleanup behaviour for externalized checkpoints. * * @return The cleanup behaviour for externalized checkpoints or <code>null</code> if none is * configured. */ @PublicEvolving public ExternalizedCheckpointCleanup getExternalizedCheckpointCleanup() { return externalizedCheckpointCleanup; } /** Cleanup behaviour for externalized checkpoints when the job is cancelled. */ @PublicEvolving public enum ExternalizedCheckpointCleanup { /** * Delete externalized checkpoints on job cancellation. * * <p>All checkpoint state will be deleted when you cancel the owning job, both the meta * data and actual program state. Therefore, you cannot resume from externalized checkpoints * after the job has been cancelled. * * <p>Note that checkpoint state is always kept if the job terminates with state {@link * JobStatus#FAILED}. */ DELETE_ON_CANCELLATION(true), /** * Retain externalized checkpoints on job cancellation. * * <p>All checkpoint state is kept when you cancel the owning job. You have to manually * delete both the checkpoint meta data and actual program state after cancelling the job. * * <p>Note that checkpoint state is always kept if the job terminates with state {@link * JobStatus#FAILED}. */ RETAIN_ON_CANCELLATION(false); private final boolean deleteOnCancellation; ExternalizedCheckpointCleanup(boolean deleteOnCancellation) { this.deleteOnCancellation = deleteOnCancellation; } /** * Returns whether persistent checkpoints shall be discarded on cancellation of the job. * * @return <code>true</code> if persistent checkpoints shall be discarded on cancellation of * the job. */ public boolean deleteOnCancellation() { return deleteOnCancellation; } } /** * Sets all relevant options contained in the {@link ReadableConfig} such as e.g. 
{@link * ExecutionCheckpointingOptions#CHECKPOINTING_MODE}. * * <p>It will change the value of a setting only if a corresponding option was set in the {@code * configuration}. If a key is not present, the current value of a field will remain untouched. * * @param configuration a configuration to read the values from */ public void configure(ReadableConfig configuration) { configuration .getOptional(ExecutionCheckpointingOptions.CHECKPOINTING_MODE) .ifPresent(this::setCheckpointingMode); configuration .getOptional(ExecutionCheckpointingOptions.CHECKPOINTING_INTERVAL) .ifPresent(i -> this.setCheckpointInterval(i.toMillis())); configuration .getOptional(ExecutionCheckpointingOptions.CHECKPOINTING_TIMEOUT) .ifPresent(t -> this.setCheckpointTimeout(t.toMillis())); configuration .getOptional(ExecutionCheckpointingOptions.MAX_CONCURRENT_CHECKPOINTS) .ifPresent(this::setMaxConcurrentCheckpoints); configuration .getOptional(ExecutionCheckpointingOptions.MIN_PAUSE_BETWEEN_CHECKPOINTS) .ifPresent(m -> this.setMinPauseBetweenCheckpoints(m.toMillis())); configuration .getOptional(ExecutionCheckpointingOptions.PREFER_CHECKPOINT_FOR_RECOVERY) .ifPresent(this::setPreferCheckpointForRecovery); configuration .getOptional(ExecutionCheckpointingOptions.TOLERABLE_FAILURE_NUMBER) .ifPresent(this::setTolerableCheckpointFailureNumber); configuration .getOptional(ExecutionCheckpointingOptions.EXTERNALIZED_CHECKPOINT) .ifPresent(this::enableExternalizedCheckpoints); configuration .getOptional(ExecutionCheckpointingOptions.ENABLE_UNALIGNED) .ifPresent(this::enableUnalignedCheckpoints); configuration .getOptional(ExecutionCheckpointingOptions.ALIGNMENT_TIMEOUT) .ifPresent(timeout -> setAlignmentTimeout(timeout.toMillis())); configuration .getOptional(ExecutionCheckpointingOptions.FORCE_UNALIGNED) .ifPresent(this::setForceUnalignedCheckpoints); } }
apache-2.0
SmarterApp/TechnologyReadinessTool
readiness/src/main/java/net/techreadiness/customer/action/UpdateOrgAction.java
1430
package net.techreadiness.customer.action; import javax.inject.Inject; import net.techreadiness.service.OrganizationService; import net.techreadiness.service.UserService; import net.techreadiness.service.exception.AuthorizationException; import net.techreadiness.service.object.Org; import net.techreadiness.ui.BaseAction; import org.apache.struts2.convention.annotation.Result; import org.apache.struts2.convention.annotation.Results; @Results({ @Result(name = "success", type = "redirect", location = "${returnUrl}") }) public class UpdateOrgAction extends BaseAction { private static final long serialVersionUID = 1L; private Long selectedOrgId; private String returnUrl; @Inject private OrganizationService orgService; @Inject private UserService userService; @Override public String execute() throws Exception { if (selectedOrgId != null) { boolean hasAccess = userService.hasAccessToOrg(getServiceContext(), getServiceContext().getUserId(), selectedOrgId); if (hasAccess) { Org org = orgService.getById(getServiceContext(), selectedOrgId); getServiceContext().setOrg(org); } else { throw new AuthorizationException(); } } return SUCCESS; } public void setSelectedOrgId(Long selectedOrgId) { this.selectedOrgId = selectedOrgId; } public String getReturnUrl() { return returnUrl; } public void setReturnUrl(String returnUrl) { this.returnUrl = returnUrl; } }
apache-2.0
Gaduo/hapi-fhir
hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/BaseHttpClientInvocation.java
4052
package ca.uhn.fhir.rest.client;

/*
 * #%L
 * HAPI FHIR - Core Library
 * %%
 * Copyright (C) 2014 - 2016 University Health Network
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.RequestTypeEnum;
import ca.uhn.fhir.rest.client.api.Header;
import ca.uhn.fhir.rest.client.api.IHttpClient;
import ca.uhn.fhir.rest.client.api.IHttpRequest;
import ca.uhn.fhir.rest.server.EncodingEnum;

/**
 * Base class for client invocations: holds the FHIR context and any extra HTTP headers, and
 * defines how a concrete invocation is turned into an {@link IHttpRequest}.
 */
public abstract class BaseHttpClientInvocation {

	private final FhirContext myContext;
	private final List<Header> myHeaders;

	public BaseHttpClientInvocation(FhirContext myContext) {
		this.myContext = myContext;
		this.myHeaders = new ArrayList<Header>();
	}

	/** Adds a custom header that will be sent with the HTTP request. */
	public void addHeader(String theName, String theValue) {
		myHeaders.add(new Header(theName, theValue));
	}

	/**
	 * Create an HTTP request out of this client request
	 *
	 * @param theUrlBase
	 *           The FHIR server base url (with a trailing "/")
	 * @param theExtraParams
	 *           Any extra request parameters the server wishes to add
	 * @param theEncoding
	 *           The encoding to use for any serialized content sent to the
	 *           server
	 */
	public abstract IHttpRequest asHttpRequest(String theUrlBase, Map<String, List<String>> theExtraParams, EncodingEnum theEncoding, Boolean thePrettyPrint);

	/**
	 * Create an HTTP request for the given url, encoding and request-type
	 *
	 * <p>NOTE(review): {@code theRequestType} is passed to the client factory but the request
	 * object itself is always built via {@code createGetRequest} — confirm whether non-GET
	 * invocations are expected to route through this method.
	 *
	 * @param theUrl
	 *           The complete FHIR url to which the http request will be sent
	 * @param theEncoding
	 *           The encoding to use for any serialized content sent to the
	 *           server
	 * @param theRequestType
	 *           the type of HTTP request (GET, DELETE, ..)
	 */
	protected IHttpRequest createHttpRequest(String theUrl, EncodingEnum theEncoding, RequestTypeEnum theRequestType) {
		IHttpClient httpClient = getRestfulClientFactory().getHttpClient(new StringBuilder(theUrl), null, null, theRequestType, myHeaders);
		return httpClient.createGetRequest(getContext(), theEncoding);
	}

	/**
	 * Returns the FHIR context associated with this client
	 *
	 * @return the myContext
	 */
	public FhirContext getContext() {
		return myContext;
	}

	/**
	 * Returns the http headers to be sent with the request
	 */
	public List<Header> getHeaders() {
		return myHeaders;
	}

	/**
	 * Gets the restful client factory
	 */
	public IRestfulClientFactory getRestfulClientFactory() {
		return myContext.getRestfulClientFactory();
	}

	/**
	 * Appends the given extra parameters to the URL builder as {@code key=value} pairs,
	 * URL-encoding both key and value. The first appended pair is preceded by {@code '?'} when
	 * {@code theWithQuestionMark} is true, otherwise by {@code '&'}; subsequent pairs always use
	 * {@code '&'}. A {@code null} or empty map is a no-op.
	 */
	public static void appendExtraParamsWithQuestionMark(Map<String, List<String>> theExtraParams, StringBuilder theUrlBuilder, boolean theWithQuestionMark) {
		if (theExtraParams == null) {
			return;
		}
		boolean first = theWithQuestionMark;

		// Iterating an empty map is a no-op, so no explicit isEmpty() guard is needed.
		for (Entry<String, List<String>> next : theExtraParams.entrySet()) {
			for (String nextValue : next.getValue()) {
				if (first) {
					theUrlBuilder.append('?');
					first = false;
				} else {
					theUrlBuilder.append('&');
				}
				try {
					theUrlBuilder.append(URLEncoder.encode(next.getKey(), "UTF-8"));
					theUrlBuilder.append('=');
					theUrlBuilder.append(URLEncoder.encode(nextValue, "UTF-8"));
				} catch (UnsupportedEncodingException e) {
					// UTF-8 is a mandatory charset on all JVMs, so this cannot occur in practice.
					throw new Error("UTF-8 not supported - This should not happen", e);
				}
			}
		}
	}

}
apache-2.0
Pyknic/speedment-aggregate-json-example
src/main/java/com/speedment/example/spring/aggregate/config/SpeedmentConfig.java
1831
package com.speedment.example.spring.aggregate.config;

import com.speedment.enterprise.datastore.runtime.DataStoreBundle;
import com.speedment.enterprise.datastore.runtime.DataStoreComponent;
import com.speedment.enterprise.plugins.json.JsonBundle;
import com.speedment.enterprise.plugins.json.JsonComponent;
import com.speedment.example.spring.aggregate.db.EmployeesApplication;
import com.speedment.example.spring.aggregate.db.EmployeesApplicationBuilder;
import com.speedment.example.spring.aggregate.db.SalaryManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.env.Environment;

import static java.util.Objects.requireNonNull;

/**
 * Spring configuration that builds the Speedment {@link EmployeesApplication}
 * from {@code spring.datasource.*} properties and exposes its components as
 * beans. The application bean is stopped automatically on context shutdown
 * via {@code destroyMethod = "stop"}.
 *
 * @author Emil Forslund
 * @since 1.0.0
 */
@Configuration
public class SpeedmentConfig {

    private final Environment env;

    SpeedmentConfig(Environment env) {
        this.env = requireNonNull(env);
    }

    /**
     * Builds and starts the Speedment application.
     * <p>
     * Fails fast with a descriptive message if any required datasource
     * property is missing, instead of passing {@code null} into the builder
     * and failing obscurely later.
     *
     * @return the running application; never {@code null}
     */
    @Bean(destroyMethod = "stop")
    EmployeesApplication getApplication() {
        return new EmployeesApplicationBuilder()
            .withConnectionUrl(required("spring.datasource.url"))
            .withUsername(required("spring.datasource.username"))
            .withPassword(required("spring.datasource.password"))
            .withBundle(DataStoreBundle.class)
            .withBundle(JsonBundle.class)
            .build();
    }

    /** @return the in-memory data store component of the application */
    @Bean
    DataStoreComponent getDataStoreComponent(EmployeesApplication app) {
        return app.getOrThrow(DataStoreComponent.class);
    }

    /** @return the JSON encoding component of the application */
    @Bean
    JsonComponent getJsonComponent(EmployeesApplication app) {
        return app.getOrThrow(JsonComponent.class);
    }

    /** @return the manager used to stream over the salaries table */
    @Bean
    SalaryManager getSalaryManager(EmployeesApplication app) {
        return app.getOrThrow(SalaryManager.class);
    }

    // Reads a required property, failing with a clear message when absent.
    private String required(String key) {
        return requireNonNull(env.getProperty(key),
            () -> "Missing required property '" + key + "'");
    }
}
apache-2.0
AndrewKhitrin/dbeaver
plugins/org.jkiss.dbeaver.ext.mysql/src/org/jkiss/dbeaver/ext/mysql/editors/MySQLSessionEditor.java
6170
/*
 * DBeaver - Universal Database Manager
 * Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org)
 * Copyright (C) 2011-2012 Eugene Fradkin (eugene.fradkin@gmail.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jkiss.dbeaver.ext.mysql.editors;

import org.eclipse.jface.action.Action;
import org.eclipse.jface.action.IContributionManager;
import org.eclipse.jface.action.Separator;
import org.eclipse.jface.dialogs.IDialogSettings;
import org.eclipse.osgi.util.NLS;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.ui.ISharedImages;
import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.ext.mysql.MySQLMessages;
import org.jkiss.dbeaver.ext.mysql.model.MySQLDataSource;
import org.jkiss.dbeaver.ext.mysql.model.session.MySQLSession;
import org.jkiss.dbeaver.ext.mysql.model.session.MySQLSessionManager;
import org.jkiss.dbeaver.model.admin.sessions.DBAServerSession;
import org.jkiss.dbeaver.model.admin.sessions.DBAServerSessionManager;
import org.jkiss.dbeaver.model.exec.DBCExecutionContext;
import org.jkiss.dbeaver.ui.ActionUtils;
import org.jkiss.dbeaver.ui.DBeaverIcons;
import org.jkiss.dbeaver.ui.UIIcon;
import org.jkiss.dbeaver.ui.UIUtils;
import org.jkiss.dbeaver.ui.views.session.AbstractSessionEditor;
import org.jkiss.dbeaver.ui.views.session.SessionManagerViewer;
import org.jkiss.utils.CommonUtils;

import java.util.Collections;
import java.util.List;
import java.util.Map;

/**
 * MySQLSessionEditor
 * <p>
 * Session-manager editor for MySQL connections. Shows the server's session
 * list, lets the user hide sleeping connections, and provides toolbar actions
 * to kill a whole session or terminate only its active query.
 */
public class MySQLSessionEditor extends AbstractSessionEditor {
    private static final Log log = Log.getLog(MySQLSessionEditor.class);

    // Toolbar action that kills the entire selected session
    private KillSessionAction killSessionAction;
    // Toolbar action that terminates only the session's active query
    private KillSessionAction terminateQueryAction;

    @Override
    public void createEditorControl(Composite parent) {
        // Actions must exist before super.createEditorControl(), which builds
        // the viewer and asks us to contribute them to the toolbar.
        killSessionAction = new KillSessionAction(false);
        terminateQueryAction = new KillSessionAction(true);
        super.createEditorControl(parent);
    }

    @Override
    protected SessionManagerViewer createSessionViewer(DBCExecutionContext executionContext, Composite parent) {
        return new SessionManagerViewer<MySQLSession>(this, parent, new MySQLSessionManager((MySQLDataSource) executionContext.getDataSource())) {
            // When true, sleeping connections are filtered out of the list;
            // persisted across sessions via the dialog settings below.
            private boolean hideSleeping;

            @Override
            protected void contributeToToolbar(DBAServerSessionManager sessionManager, IContributionManager contributionManager) {
                contributionManager.add(killSessionAction);
                contributionManager.add(terminateQueryAction);
                contributionManager.add(new Separator());
                // Check-box toggle: show only active (non-sleeping) connections
                contributionManager.add(ActionUtils.makeActionContribution(
                    new Action("Hide sleeping", Action.AS_CHECK_BOX) {
                        {
                            setToolTipText("Show only active connections");
                            setChecked(hideSleeping);
                        }

                        @Override
                        public void run() {
                            hideSleeping = isChecked();
                            refreshPart(MySQLSessionEditor.this, true);
                        }
                    }, true));
                contributionManager.add(new Separator());
            }

            @Override
            protected void onSessionSelect(DBAServerSession session) {
                super.onSessionSelect(session);
                // Kill is possible for any selected session; terminate-query
                // only makes sense when the session is actually running a query.
                killSessionAction.setEnabled(session != null);
                terminateQueryAction.setEnabled(session != null && !CommonUtils.isEmpty(session.getActiveQuery()));
            }

            @Override
            public Map<String, Object> getSessionOptions() {
                if (hideSleeping) {
                    return Collections.singletonMap(MySQLSessionManager.OPTION_HIDE_SLEEPING, true);
                }
                return super.getSessionOptions();
            }

            @Override
            protected void loadSettings(IDialogSettings settings) {
                // Restore the persisted filter state before the base class
                // uses the settings to populate the viewer.
                hideSleeping = CommonUtils.toBoolean(settings.get("hideSleeping"));
                super.loadSettings(settings);
            }

            @Override
            protected void saveSettings(IDialogSettings settings) {
                super.saveSettings(settings);
                settings.put("hideSleeping", hideSleeping);
            }
        };
    }

    /**
     * Toolbar action that either kills the selected session(s) outright or
     * terminates only their active queries, after user confirmation.
     */
    private class KillSessionAction extends Action {
        // true = terminate only the active query; false = kill the session
        private boolean killQuery;

        public KillSessionAction(boolean killQuery) {
            super(
                killQuery ? MySQLMessages.editors_session_editor_action_terminate_Query : MySQLMessages.editors_session_editor_action_kill_Session,
                killQuery ? UIUtils.getShardImageDescriptor(ISharedImages.IMG_ELCL_STOP) : DBeaverIcons.getImageDescriptor(UIIcon.SQL_DISCONNECT));
            this.killQuery = killQuery;
        }

        @Override
        public void run() {
            final List<DBAServerSession> sessions = getSessionsViewer().getSelectedSessions();
            // Confirm with the user before issuing KILL to the server
            if (sessions != null && UIUtils.confirmAction(getSite().getShell(), this.getText(),
                NLS.bind(MySQLMessages.editors_session_editor_confirm, getText(), sessions)))
            {
                getSessionsViewer().alterSessions(
                    sessions,
                    Collections.singletonMap(MySQLSessionManager.PROP_KILL_QUERY, killQuery));
            }
        }
    }
}
apache-2.0
mdogan/hazelcast
hazelcast/src/main/java/com/hazelcast/config/SplitBrainProtectionConfig.java
10340
/*
 * Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.config;

import com.hazelcast.internal.config.ConfigDataSerializerHook;
import com.hazelcast.nio.ObjectDataInput;
import com.hazelcast.nio.ObjectDataOutput;
import com.hazelcast.nio.serialization.IdentifiedDataSerializable;
import com.hazelcast.splitbrainprotection.SplitBrainProtectionFunction;
import com.hazelcast.splitbrainprotection.SplitBrainProtectionOn;
import com.hazelcast.splitbrainprotection.impl.ProbabilisticSplitBrainProtectionFunction;
import com.hazelcast.splitbrainprotection.impl.RecentlyActiveSplitBrainProtectionFunction;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import static com.hazelcast.internal.serialization.impl.SerializationUtil.readNullableList;
import static com.hazelcast.internal.serialization.impl.SerializationUtil.writeNullableList;
import static com.hazelcast.splitbrainprotection.SplitBrainProtectionOn.READ_WRITE;

/**
 * Configuration for cluster split brain protection, a means to protect consistency of data from network
 * partitions. In this context, split brain protection refers to the number of members in the cluster required
 * for an operation to succeed.
 * <p>
 * Since Hazelcast 3.5, the default built-in split brain protection implementation keeps track of the number of members
 * in the cluster, as determined by Hazelcast's cluster membership management.
 * <p>
 * Since Hazelcast 3.10, two additional built-in split brain protection implementations, decoupled from the existing
 * cluster membership management, are provided:
 * <ul>
 * <li>Probabilistic split brain protection: in this mode, member heartbeats are tracked and an adaptive failure
 * detector determines for each member the suspicion level. Additionally, when the Hazelcast member
 * is configured with the ICMP ping failure detector enabled and operating in parallel mode,
 * ping information is also used to detect member failures early.
 * <p>To create a {@code SplitBrainProtectionConfig} for probabilistic split brain protection, use
 * {@link #newProbabilisticSplitBrainProtectionConfigBuilder(String, int)} to configure and build the
 * {@code SplitBrainProtectionConfig}.
 * </li>
 * <li>Recently-active split brain protection: in this mode, for a member to be considered present for split brain
 * protection, a heartbeat must be received within the configured time-window since now. Additionally, when the
 * Hazelcast member is configured with the ICMP ping failure detector enabled and operating in
 * parallel mode, ping information is also used to detect member failures early.
 * <p>To create a {@code SplitBrainProtectionConfig} for recently-active split brain protection, use
 * {@link #newRecentlyActiveSplitBrainProtectionConfigBuilder(String, int, int)} to configure and build the
 * {@code SplitBrainProtectionConfig}.
 * </li>
 * </ul>
 *
 * @see SplitBrainProtectionFunction
 * @see ProbabilisticSplitBrainProtectionFunction
 * @see RecentlyActiveSplitBrainProtectionFunction
 */
public class SplitBrainProtectionConfig implements IdentifiedDataSerializable, NamedConfig {

    private String name;
    private boolean enabled;
    private int minimumClusterSize;
    private List<SplitBrainProtectionListenerConfig> listenerConfigs = new ArrayList<SplitBrainProtectionListenerConfig>();
    private SplitBrainProtectionOn protectOn = READ_WRITE;
    private String functionClassName;
    private SplitBrainProtectionFunction functionImplementation;

    public SplitBrainProtectionConfig() {
    }

    public SplitBrainProtectionConfig(String name, boolean enabled) {
        this.name = name;
        this.enabled = enabled;
    }

    public SplitBrainProtectionConfig(String name, boolean enabled, int minimumClusterSize) {
        this.name = name;
        this.enabled = enabled;
        this.minimumClusterSize = minimumClusterSize;
    }

    /**
     * Copy constructor. Produces a configuration equivalent to the given one.
     * <p>
     * Fixes over the previous version: {@code functionClassName} and
     * {@code functionImplementation} are now copied (they were silently
     * dropped before, although {@link #toString()} and {@link #writeData}
     * both include them), and {@code listenerConfigs} is copied into a new
     * list so that mutating the copy no longer mutates the original.
     */
    public SplitBrainProtectionConfig(SplitBrainProtectionConfig splitBrainProtectionConfig) {
        this.name = splitBrainProtectionConfig.name;
        this.enabled = splitBrainProtectionConfig.enabled;
        this.minimumClusterSize = splitBrainProtectionConfig.minimumClusterSize;
        // defensive copy: don't share the listener list with the source config
        this.listenerConfigs = new ArrayList<SplitBrainProtectionListenerConfig>(splitBrainProtectionConfig.listenerConfigs);
        this.protectOn = splitBrainProtectionConfig.protectOn;
        this.functionClassName = splitBrainProtectionConfig.functionClassName;
        this.functionImplementation = splitBrainProtectionConfig.functionImplementation;
    }

    /** Returns the name of this split-brain protection configuration. */
    public String getName() {
        return name;
    }

    /** Sets the name of this configuration. Returns {@code this} for chaining. */
    public SplitBrainProtectionConfig setName(String name) {
        this.name = name;
        return this;
    }

    /** Returns whether split-brain protection is enabled. */
    public boolean isEnabled() {
        return enabled;
    }

    /** Enables or disables split-brain protection. Returns {@code this} for chaining. */
    public SplitBrainProtectionConfig setEnabled(boolean enabled) {
        this.enabled = enabled;
        return this;
    }

    /** Returns the minimum number of members required for the cluster not to be considered split. */
    public int getMinimumClusterSize() {
        return minimumClusterSize;
    }

    /**
     * Sets the minimum cluster size.
     *
     * @throws InvalidConfigurationException if {@code minimumClusterSize < 2}
     *         (a single member can never detect a split)
     */
    public SplitBrainProtectionConfig setMinimumClusterSize(int minimumClusterSize) {
        if (minimumClusterSize < 2) {
            throw new InvalidConfigurationException("Minimum cluster size configured for split-brain protection"
                    + " cannot be less than 2");
        }
        this.minimumClusterSize = minimumClusterSize;
        return this;
    }

    /** Returns which operation categories (read/write) are guarded; defaults to {@code READ_WRITE}. */
    public SplitBrainProtectionOn getProtectOn() {
        return protectOn;
    }

    /** Sets which operation categories are guarded. Returns {@code this} for chaining. */
    public SplitBrainProtectionConfig setProtectOn(SplitBrainProtectionOn protectOn) {
        this.protectOn = protectOn;
        return this;
    }

    /** Returns the (mutable) list of split-brain protection listener configurations. */
    public List<SplitBrainProtectionListenerConfig> getListenerConfigs() {
        return listenerConfigs;
    }

    /** Replaces the listener configuration list. Returns {@code this} for chaining. */
    public SplitBrainProtectionConfig setListenerConfigs(List<SplitBrainProtectionListenerConfig> listenerConfigs) {
        this.listenerConfigs = listenerConfigs;
        return this;
    }

    /** Appends a listener configuration. Returns {@code this} for chaining. */
    public SplitBrainProtectionConfig addListenerConfig(SplitBrainProtectionListenerConfig listenerConfig) {
        this.listenerConfigs.add(listenerConfig);
        return this;
    }

    /** Returns the class name of the protection function, if configured by name. */
    public String getFunctionClassName() {
        return functionClassName;
    }

    /** Sets the protection function by class name. Returns {@code this} for chaining. */
    public SplitBrainProtectionConfig setFunctionClassName(String functionClassName) {
        this.functionClassName = functionClassName;
        return this;
    }

    /** Returns the protection function instance, if configured by instance. */
    public SplitBrainProtectionFunction getFunctionImplementation() {
        return functionImplementation;
    }

    /** Sets the protection function instance. Returns {@code this} for chaining. */
    public SplitBrainProtectionConfig setFunctionImplementation(SplitBrainProtectionFunction functionImplementation) {
        this.functionImplementation = functionImplementation;
        return this;
    }

    @Override
    public String toString() {
        return "SplitBrainProtectionConfig{"
                + "name='" + name + '\''
                + ", enabled=" + enabled
                + ", minimumClusterSize=" + minimumClusterSize
                + ", listenerConfigs=" + listenerConfigs
                + ", functionClassName=" + functionClassName
                + ", functionImplementation=" + functionImplementation
                + ", protectOn=" + protectOn + '}';
    }

    @Override
    public int getFactoryId() {
        return ConfigDataSerializerHook.F_ID;
    }

    @Override
    public int getClassId() {
        return ConfigDataSerializerHook.SPLIT_BRAIN_PROTECTION_CONFIG;
    }

    @Override
    public void writeData(ObjectDataOutput out) throws IOException {
        out.writeUTF(name);
        out.writeBoolean(enabled);
        out.writeInt(minimumClusterSize);
        writeNullableList(listenerConfigs, out);
        out.writeUTF(protectOn.name());
        out.writeUTF(functionClassName);
        out.writeObject(functionImplementation);
    }

    @Override
    public void readData(ObjectDataInput in) throws IOException {
        // Field order must mirror writeData exactly
        name = in.readUTF();
        enabled = in.readBoolean();
        minimumClusterSize = in.readInt();
        listenerConfigs = readNullableList(in);
        protectOn = SplitBrainProtectionOn.valueOf(in.readUTF());
        functionClassName = in.readUTF();
        functionImplementation = in.readObject();
    }

    /**
     * Returns a builder for {@link SplitBrainProtectionConfig} with the given {@code name} using a probabilistic
     * split brain protection function, for the given split brain protection {@code size} that is enabled by default.
     *
     * @param name the split brain protection's name
     * @param minimumClusterSize minimum count of members in the cluster not to be considered it split.
     * @see ProbabilisticSplitBrainProtectionFunction
     */
    public static ProbabilisticSplitBrainProtectionConfigBuilder
    newProbabilisticSplitBrainProtectionConfigBuilder(String name, int minimumClusterSize) {
        return new ProbabilisticSplitBrainProtectionConfigBuilder(name, minimumClusterSize);
    }

    /**
     * Returns a builder for a {@link SplitBrainProtectionConfig} with the given {@code name} using a recently-active
     * split brain protection function for the given split brain protection {@code size} that is enabled by default.
     *
     * @param name the split brain protection's name
     * @param minimumClusterSize minimum count of members in the cluster not to be considered it split.
     * @param toleranceMillis maximum amount of milliseconds that may have passed since last heartbeat was received for a
     *                        member to be considered present for split brain protection.
     * @see RecentlyActiveSplitBrainProtectionFunction
     */
    public static RecentlyActiveSplitBrainProtectionConfigBuilder
    newRecentlyActiveSplitBrainProtectionConfigBuilder(String name, int minimumClusterSize, int toleranceMillis) {
        return new RecentlyActiveSplitBrainProtectionConfigBuilder(name, minimumClusterSize, toleranceMillis);
    }
}
apache-2.0
ekumenlabs/AndroidStreamingClient
android_streaming_client/src/main/java/com/c77/androidstreamingclient/lib/rtp/buffer/MinDelayRtpMediaBuffer.java
7271
/*
 * Copyright (C) 2015 Creativa77 SRL and others
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Contributors:
 *
 * Ayelen Chavez ashi@creativa77.com.ar
 * Julian Cerruti jcerruti@creativa77.com.ar
 *
 */

package com.c77.androidstreamingclient.lib.rtp.buffer;

import com.biasedbit.efflux.packet.DataPacket;
import com.biasedbit.efflux.participant.RtpParticipantInfo;
import com.biasedbit.efflux.session.RtpSession;
import com.biasedbit.efflux.session.RtpSessionDataListener;
import com.c77.androidstreamingclient.lib.rtp.RtpMediaDecoder;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * RTP buffer that sends packets upstream for processing immediately as long as they arrive in order.
 * <p/>
 * Approach: a packet will be sent upstream only if it is the one being expected. If a received packet
 * is newer than the one being expected, it will be stored in order.
 * If stored packages are older than the configured threshold, they will be discarded.
 * <p/>
 * Not thread-safe: assumed to be called from a single RTP receiver thread
 * (no synchronization is visible in this class) — confirm with callers.
 *
 * @author Julian Cerruti
 */
public class MinDelayRtpMediaBuffer implements RtpMediaBuffer {
    // Configuration property name overriding the out-of-order timeout (in milliseconds)
    public static final String CONFIG_TIMEOUT_MS = "NODELAY_TIMEOUT";

    private static final Log log = LogFactory.getLog(MinDelayRtpMediaBuffer.class);

    // Default wait for missing packets before giving up on them (milliseconds)
    private static final long DEFAULT_OUT_OF_ORDER_MAX_TIME_MS = 1000;

    // Object that will receive ordered packets
    private final RtpSessionDataListener upstream;

    // milliseconds. Wait up to this amount of time for missing packets to arrive.
    // If we start getting packets newer than this, discard the old ones and restart
    private final long outOfOrderMaxTime;

    // Temporary cache of packets received out of order, keyed by sequence number.
    // (A parallel write-only timestamp map in the previous version was never read
    // and has been removed as dead bookkeeping.)
    private final Map<Integer, DataPacket> packetMap = new HashMap<Integer, DataPacket>();

    private State currentState;
    private int nextExpectedSequenceNumber;

    // The timestamp of the last packet we were able to successfully send upstream for processing
    private long lastProcessedTimestamp;

    // Keep track of the difference between the packet timestamps and this device's time at the
    // time we received the first packet
    private long timestampDifference;

    /**
     * Creates a RTP buffer with a given configuration.
     *
     * @param upstream      object that will receive packets in order
     * @param configuration if {@link #CONFIG_TIMEOUT_MS} is present, its value replaces the
     *                      default timeout (1000 ms); may be {@code null}
     */
    public MinDelayRtpMediaBuffer(RtpSessionDataListener upstream, Properties configuration) {
        configuration = (configuration != null) ? configuration : new Properties();
        this.upstream = upstream;
        currentState = State.IDLE;
        outOfOrderMaxTime = Long.parseLong(configuration.getProperty(CONFIG_TIMEOUT_MS,
                Long.toString(DEFAULT_OUT_OF_ORDER_MAX_TIME_MS)));
        log.info("Using MinDelayRtpMediaBuffer with OUT_OF_ORDER_MAX_TIME = [" + outOfOrderMaxTime + "]");
    }

    /**
     * Does nothing on stop.
     */
    @Override
    public void stop() {
    }

    /**
     * When a new packet is received, it decides whether to send it to upstream or not.
     * The sent packets are ordered.
     *
     * @param session     RTP session the packet belongs to
     * @param participant sender of the packet
     * @param packet      the received packet
     */
    @Override
    public void dataPacketReceived(RtpSession session, RtpParticipantInfo participant, DataPacket packet) {
        if (currentState == State.IDLE) {
            // First packet ever: synchronize expectations to it
            nextExpectedSequenceNumber = packet.getSequenceNumber();
            // NOTE: /90 converts the 90 kHz RTP clock to milliseconds — presumably
            // a video payload; confirm against the stream's clock rate.
            timestampDifference = System.currentTimeMillis() - packet.getTimestamp() / 90;
            if (RtpMediaDecoder.DEBUGGING) {
                log.info("Stream started. Timestamps: " + timestampDifference);
            }
            currentState = State.DIRECT;
        }

        if (packet.getSequenceNumber() == nextExpectedSequenceNumber) {
            // In-order packet: forward it, then drain any buffered successors
            forwardPacket(session, participant, packet);
            while (packetMap.containsKey(nextExpectedSequenceNumber)) {
                if (RtpMediaDecoder.DEBUGGING) {
                    log.warn("Sending old buffered packet. #" + nextExpectedSequenceNumber);
                }
                forwardPacket(session, participant, packetMap.remove(nextExpectedSequenceNumber));
            }
        } else if (packet.getTimestamp() / 90 - lastProcessedTimestamp > outOfOrderMaxTime) {
            // Packets much newer than what we were waiting for: give up on the
            // missing ones, discard the buffer and restart from here
            if (RtpMediaDecoder.DEBUGGING) {
                log.warn("Out of order packets are getting too old. Resetting");
            }
            forwardPacket(session, participant, packet);
            packetMap.clear();
        } else {
            // Out-of-order but still within the timeout: buffer for later
            if (RtpMediaDecoder.DEBUGGING) {
                log.warn("Saving out of order packet. #" + packet.getSequenceNumber());
            }
            packetMap.put(packet.getSequenceNumber(), packet);
        }
    }

    // Sends one packet upstream (logging but not propagating upstream failures,
    // matching the original best-effort behavior) and advances the expected
    // sequence number and last-processed timestamp.
    private void forwardPacket(RtpSession session, RtpParticipantInfo participant, DataPacket packet) {
        try {
            upstream.dataPacketReceived(session, participant, packet);
        } catch (Exception e) {
            log.error("Error while trying to pass packet to upstream", e);
        }
        lastProcessedTimestamp = packet.getTimestamp() / 90;
        nextExpectedSequenceNumber = packet.getSequenceNumber() + 1;
    }

    /**
     * State constants.
     */
    private enum State {
        IDLE, // Just started. Didn't receive any packets yet
        DIRECT, // No packets out of order pending
        REORDER, // There are out of order packets waiting to be processed
    }
}
apache-2.0
juwi/hbase
hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
431311
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: WAL.proto package org.apache.hadoop.hbase.protobuf.generated; public final class WALProtos { private WALProtos() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } /** * Protobuf enum {@code hbase.pb.ScopeType} */ public enum ScopeType implements com.google.protobuf.ProtocolMessageEnum { /** * <code>REPLICATION_SCOPE_LOCAL = 0;</code> */ REPLICATION_SCOPE_LOCAL(0, 0), /** * <code>REPLICATION_SCOPE_GLOBAL = 1;</code> */ REPLICATION_SCOPE_GLOBAL(1, 1), ; /** * <code>REPLICATION_SCOPE_LOCAL = 0;</code> */ public static final int REPLICATION_SCOPE_LOCAL_VALUE = 0; /** * <code>REPLICATION_SCOPE_GLOBAL = 1;</code> */ public static final int REPLICATION_SCOPE_GLOBAL_VALUE = 1; public final int getNumber() { return value; } public static ScopeType valueOf(int value) { switch (value) { case 0: return REPLICATION_SCOPE_LOCAL; case 1: return REPLICATION_SCOPE_GLOBAL; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<ScopeType> internalGetValueMap() { return internalValueMap; } private static com.google.protobuf.Internal.EnumLiteMap<ScopeType> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<ScopeType>() { public ScopeType findValueByNumber(int number) { return ScopeType.valueOf(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(index); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.getDescriptor().getEnumTypes().get(0); } private static final ScopeType[] VALUES = values(); public static ScopeType valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new 
java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int index; private final int value; private ScopeType(int index, int value) { this.index = index; this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.ScopeType) } public interface WALHeaderOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional bool has_compression = 1; /** * <code>optional bool has_compression = 1;</code> */ boolean hasHasCompression(); /** * <code>optional bool has_compression = 1;</code> */ boolean getHasCompression(); // optional bytes encryption_key = 2; /** * <code>optional bytes encryption_key = 2;</code> */ boolean hasEncryptionKey(); /** * <code>optional bytes encryption_key = 2;</code> */ com.google.protobuf.ByteString getEncryptionKey(); // optional bool has_tag_compression = 3; /** * <code>optional bool has_tag_compression = 3;</code> */ boolean hasHasTagCompression(); /** * <code>optional bool has_tag_compression = 3;</code> */ boolean getHasTagCompression(); // optional string writer_cls_name = 4; /** * <code>optional string writer_cls_name = 4;</code> */ boolean hasWriterClsName(); /** * <code>optional string writer_cls_name = 4;</code> */ java.lang.String getWriterClsName(); /** * <code>optional string writer_cls_name = 4;</code> */ com.google.protobuf.ByteString getWriterClsNameBytes(); // optional string cell_codec_cls_name = 5; /** * <code>optional string cell_codec_cls_name = 5;</code> */ boolean hasCellCodecClsName(); /** * <code>optional string cell_codec_cls_name = 5;</code> */ java.lang.String getCellCodecClsName(); /** * <code>optional string cell_codec_cls_name = 5;</code> */ com.google.protobuf.ByteString getCellCodecClsNameBytes(); } /** * Protobuf type {@code hbase.pb.WALHeader} */ public static final class WALHeader extends com.google.protobuf.GeneratedMessage implements WALHeaderOrBuilder { // Use WALHeader.newBuilder() to construct. 
private WALHeader(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private WALHeader(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final WALHeader defaultInstance; public static WALHeader getDefaultInstance() { return defaultInstance; } public WALHeader getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private WALHeader( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; hasCompression_ = input.readBool(); break; } case 18: { bitField0_ |= 0x00000002; encryptionKey_ = input.readBytes(); break; } case 24: { bitField0_ |= 0x00000004; hasTagCompression_ = input.readBool(); break; } case 34: { bitField0_ |= 0x00000008; writerClsName_ = input.readBytes(); break; } case 42: { bitField0_ |= 0x00000010; cellCodecClsName_ = input.readBytes(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final 
com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALHeader_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALHeader_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.Builder.class); } public static com.google.protobuf.Parser<WALHeader> PARSER = new com.google.protobuf.AbstractParser<WALHeader>() { public WALHeader parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new WALHeader(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<WALHeader> getParserForType() { return PARSER; } private int bitField0_; // optional bool has_compression = 1; public static final int HAS_COMPRESSION_FIELD_NUMBER = 1; private boolean hasCompression_; /** * <code>optional bool has_compression = 1;</code> */ public boolean hasHasCompression() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional bool has_compression = 1;</code> */ public boolean getHasCompression() { return hasCompression_; } // optional bytes encryption_key = 2; public static final int ENCRYPTION_KEY_FIELD_NUMBER = 2; private com.google.protobuf.ByteString encryptionKey_; /** * <code>optional bytes encryption_key = 2;</code> */ public boolean hasEncryptionKey() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional bytes encryption_key = 2;</code> */ public com.google.protobuf.ByteString getEncryptionKey() { return encryptionKey_; } // optional bool has_tag_compression = 3; public static final int HAS_TAG_COMPRESSION_FIELD_NUMBER = 3; 
private boolean hasTagCompression_; /** * <code>optional bool has_tag_compression = 3;</code> */ public boolean hasHasTagCompression() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional bool has_tag_compression = 3;</code> */ public boolean getHasTagCompression() { return hasTagCompression_; } // optional string writer_cls_name = 4; public static final int WRITER_CLS_NAME_FIELD_NUMBER = 4; private java.lang.Object writerClsName_; /** * <code>optional string writer_cls_name = 4;</code> */ public boolean hasWriterClsName() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional string writer_cls_name = 4;</code> */ public java.lang.String getWriterClsName() { java.lang.Object ref = writerClsName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { writerClsName_ = s; } return s; } } /** * <code>optional string writer_cls_name = 4;</code> */ public com.google.protobuf.ByteString getWriterClsNameBytes() { java.lang.Object ref = writerClsName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); writerClsName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // optional string cell_codec_cls_name = 5; public static final int CELL_CODEC_CLS_NAME_FIELD_NUMBER = 5; private java.lang.Object cellCodecClsName_; /** * <code>optional string cell_codec_cls_name = 5;</code> */ public boolean hasCellCodecClsName() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional string cell_codec_cls_name = 5;</code> */ public java.lang.String getCellCodecClsName() { java.lang.Object ref = cellCodecClsName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) 
ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { cellCodecClsName_ = s; } return s; } } /** * <code>optional string cell_codec_cls_name = 5;</code> */ public com.google.protobuf.ByteString getCellCodecClsNameBytes() { java.lang.Object ref = cellCodecClsName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); cellCodecClsName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private void initFields() { hasCompression_ = false; encryptionKey_ = com.google.protobuf.ByteString.EMPTY; hasTagCompression_ = false; writerClsName_ = ""; cellCodecClsName_ = ""; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, hasCompression_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, encryptionKey_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBool(3, hasTagCompression_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeBytes(4, getWriterClsNameBytes()); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBytes(5, getCellCodecClsNameBytes()); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, hasCompression_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, encryptionKey_); } if (((bitField0_ & 0x00000004) == 
0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(3, hasTagCompression_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(4, getWriterClsNameBytes()); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(5, getCellCodecClsNameBytes()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader) obj; boolean result = true; result = result && (hasHasCompression() == other.hasHasCompression()); if (hasHasCompression()) { result = result && (getHasCompression() == other.getHasCompression()); } result = result && (hasEncryptionKey() == other.hasEncryptionKey()); if (hasEncryptionKey()) { result = result && getEncryptionKey() .equals(other.getEncryptionKey()); } result = result && (hasHasTagCompression() == other.hasHasTagCompression()); if (hasHasTagCompression()) { result = result && (getHasTagCompression() == other.getHasTagCompression()); } result = result && (hasWriterClsName() == other.hasWriterClsName()); if (hasWriterClsName()) { result = result && getWriterClsName() .equals(other.getWriterClsName()); } result = result && (hasCellCodecClsName() == other.hasCellCodecClsName()); if (hasCellCodecClsName()) { result = result && getCellCodecClsName() .equals(other.getCellCodecClsName()); } result = result && 
getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasHasCompression()) { hash = (37 * hash) + HAS_COMPRESSION_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getHasCompression()); } if (hasEncryptionKey()) { hash = (37 * hash) + ENCRYPTION_KEY_FIELD_NUMBER; hash = (53 * hash) + getEncryptionKey().hashCode(); } if (hasHasTagCompression()) { hash = (37 * hash) + HAS_TAG_COMPRESSION_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getHasTagCompression()); } if (hasWriterClsName()) { hash = (37 * hash) + WRITER_CLS_NAME_FIELD_NUMBER; hash = (53 * hash) + getWriterClsName().hashCode(); } if (hasCellCodecClsName()) { hash = (37 * hash) + CELL_CODEC_CLS_NAME_FIELD_NUMBER; hash = (53 * hash) + getCellCodecClsName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.WALHeader} */ public static final 
class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeaderOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALHeader_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALHeader_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); hasCompression_ = false; bitField0_ = (bitField0_ & ~0x00000001); encryptionKey_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); hasTagCompression_ = false; bitField0_ = (bitField0_ & ~0x00000004); writerClsName_ = ""; bitField0_ = (bitField0_ & ~0x00000008); cellCodecClsName_ = ""; bitField0_ = (bitField0_ & ~0x00000010); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALHeader_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader getDefaultInstanceForType() { return 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader build() { org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader buildPartial() { org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.hasCompression_ = hasCompression_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.encryptionKey_ = encryptionKey_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.hasTagCompression_ = hasTagCompression_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.writerClsName_ = writerClsName_; if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; } result.cellCodecClsName_ = cellCodecClsName_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader other) { if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.getDefaultInstance()) return this; if (other.hasHasCompression()) { setHasCompression(other.getHasCompression()); } if (other.hasEncryptionKey()) { setEncryptionKey(other.getEncryptionKey()); 
} if (other.hasHasTagCompression()) { setHasTagCompression(other.getHasTagCompression()); } if (other.hasWriterClsName()) { bitField0_ |= 0x00000008; writerClsName_ = other.writerClsName_; onChanged(); } if (other.hasCellCodecClsName()) { bitField0_ |= 0x00000010; cellCodecClsName_ = other.cellCodecClsName_; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // optional bool has_compression = 1; private boolean hasCompression_ ; /** * <code>optional bool has_compression = 1;</code> */ public boolean hasHasCompression() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional bool has_compression = 1;</code> */ public boolean getHasCompression() { return hasCompression_; } /** * <code>optional bool has_compression = 1;</code> */ public Builder setHasCompression(boolean value) { bitField0_ |= 0x00000001; hasCompression_ = value; onChanged(); return this; } /** * <code>optional bool has_compression = 1;</code> */ public Builder clearHasCompression() { bitField0_ = (bitField0_ & ~0x00000001); hasCompression_ = false; onChanged(); return this; } // optional bytes encryption_key = 2; private com.google.protobuf.ByteString encryptionKey_ = com.google.protobuf.ByteString.EMPTY; /** * <code>optional bytes encryption_key = 2;</code> */ public boolean hasEncryptionKey() { return 
((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional bytes encryption_key = 2;</code> */ public com.google.protobuf.ByteString getEncryptionKey() { return encryptionKey_; } /** * <code>optional bytes encryption_key = 2;</code> */ public Builder setEncryptionKey(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; encryptionKey_ = value; onChanged(); return this; } /** * <code>optional bytes encryption_key = 2;</code> */ public Builder clearEncryptionKey() { bitField0_ = (bitField0_ & ~0x00000002); encryptionKey_ = getDefaultInstance().getEncryptionKey(); onChanged(); return this; } // optional bool has_tag_compression = 3; private boolean hasTagCompression_ ; /** * <code>optional bool has_tag_compression = 3;</code> */ public boolean hasHasTagCompression() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional bool has_tag_compression = 3;</code> */ public boolean getHasTagCompression() { return hasTagCompression_; } /** * <code>optional bool has_tag_compression = 3;</code> */ public Builder setHasTagCompression(boolean value) { bitField0_ |= 0x00000004; hasTagCompression_ = value; onChanged(); return this; } /** * <code>optional bool has_tag_compression = 3;</code> */ public Builder clearHasTagCompression() { bitField0_ = (bitField0_ & ~0x00000004); hasTagCompression_ = false; onChanged(); return this; } // optional string writer_cls_name = 4; private java.lang.Object writerClsName_ = ""; /** * <code>optional string writer_cls_name = 4;</code> */ public boolean hasWriterClsName() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional string writer_cls_name = 4;</code> */ public java.lang.String getWriterClsName() { java.lang.Object ref = writerClsName_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); writerClsName_ = s; return s; } else { return (java.lang.String) ref; } } 
/** * <code>optional string writer_cls_name = 4;</code> */ public com.google.protobuf.ByteString getWriterClsNameBytes() { java.lang.Object ref = writerClsName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); writerClsName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>optional string writer_cls_name = 4;</code> */ public Builder setWriterClsName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; writerClsName_ = value; onChanged(); return this; } /** * <code>optional string writer_cls_name = 4;</code> */ public Builder clearWriterClsName() { bitField0_ = (bitField0_ & ~0x00000008); writerClsName_ = getDefaultInstance().getWriterClsName(); onChanged(); return this; } /** * <code>optional string writer_cls_name = 4;</code> */ public Builder setWriterClsNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; writerClsName_ = value; onChanged(); return this; } // optional string cell_codec_cls_name = 5; private java.lang.Object cellCodecClsName_ = ""; /** * <code>optional string cell_codec_cls_name = 5;</code> */ public boolean hasCellCodecClsName() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional string cell_codec_cls_name = 5;</code> */ public java.lang.String getCellCodecClsName() { java.lang.Object ref = cellCodecClsName_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); cellCodecClsName_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>optional string cell_codec_cls_name = 5;</code> */ public com.google.protobuf.ByteString getCellCodecClsNameBytes() { java.lang.Object ref = cellCodecClsName_; if (ref instanceof String) { com.google.protobuf.ByteString b = 
com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); cellCodecClsName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>optional string cell_codec_cls_name = 5;</code> */ public Builder setCellCodecClsName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000010; cellCodecClsName_ = value; onChanged(); return this; } /** * <code>optional string cell_codec_cls_name = 5;</code> */ public Builder clearCellCodecClsName() { bitField0_ = (bitField0_ & ~0x00000010); cellCodecClsName_ = getDefaultInstance().getCellCodecClsName(); onChanged(); return this; } /** * <code>optional string cell_codec_cls_name = 5;</code> */ public Builder setCellCodecClsNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000010; cellCodecClsName_ = value; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.WALHeader) } static { defaultInstance = new WALHeader(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.WALHeader) } public interface WALKeyOrBuilder extends com.google.protobuf.MessageOrBuilder { // required bytes encoded_region_name = 1; /** * <code>required bytes encoded_region_name = 1;</code> */ boolean hasEncodedRegionName(); /** * <code>required bytes encoded_region_name = 1;</code> */ com.google.protobuf.ByteString getEncodedRegionName(); // required bytes table_name = 2; /** * <code>required bytes table_name = 2;</code> */ boolean hasTableName(); /** * <code>required bytes table_name = 2;</code> */ com.google.protobuf.ByteString getTableName(); // required uint64 log_sequence_number = 3; /** * <code>required uint64 log_sequence_number = 3;</code> */ boolean hasLogSequenceNumber(); /** * <code>required uint64 log_sequence_number = 3;</code> */ long getLogSequenceNumber(); // required uint64 write_time = 4; /** * <code>required uint64 write_time 
= 4;</code> */ boolean hasWriteTime(); /** * <code>required uint64 write_time = 4;</code> */ long getWriteTime(); // optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; /** * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code> * * <pre> * *This parameter is deprecated in favor of clusters which *contains the list of clusters that have consumed the change. *It is retained so that the log created by earlier releases (0.94) *can be read by the newer releases. * </pre> */ @java.lang.Deprecated boolean hasClusterId(); /** * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code> * * <pre> * *This parameter is deprecated in favor of clusters which *contains the list of clusters that have consumed the change. *It is retained so that the log created by earlier releases (0.94) *can be read by the newer releases. * </pre> */ @java.lang.Deprecated org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID getClusterId(); /** * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code> * * <pre> * *This parameter is deprecated in favor of clusters which *contains the list of clusters that have consumed the change. *It is retained so that the log created by earlier releases (0.94) *can be read by the newer releases. * </pre> */ @java.lang.Deprecated org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdOrBuilder(); // repeated .hbase.pb.FamilyScope scopes = 6; /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope> getScopesList(); /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope getScopes(int index); /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ int getScopesCount(); /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder> getScopesOrBuilderList(); /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder getScopesOrBuilder( int index); // optional uint32 following_kv_count = 7; /** * <code>optional uint32 following_kv_count = 7;</code> */ boolean hasFollowingKvCount(); /** * <code>optional uint32 following_kv_count = 7;</code> */ int getFollowingKvCount(); // repeated .hbase.pb.UUID cluster_ids = 8; /** * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> * * <pre> * *This field contains the list of clusters that have *consumed the change * </pre> */ java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID> getClusterIdsList(); /** * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> * * <pre> * *This field contains the list of clusters that have *consumed the change * </pre> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID getClusterIds(int index); /** * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> * * <pre> * *This field contains the list of clusters that have *consumed the change * </pre> */ int getClusterIdsCount(); /** * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> * * <pre> * *This field contains the list of clusters that have *consumed the change * </pre> */ java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder> getClusterIdsOrBuilderList(); /** * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> * * <pre> * *This field contains the list of clusters that have *consumed the change * </pre> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdsOrBuilder( int index); // optional uint64 nonceGroup = 9; /** * <code>optional uint64 nonceGroup = 9;</code> */ boolean hasNonceGroup(); /** * <code>optional uint64 nonceGroup = 9;</code> */ long getNonceGroup(); // optional uint64 nonce = 10; /** * <code>optional uint64 nonce = 10;</code> */ boolean hasNonce(); /** * <code>optional uint64 nonce = 10;</code> */ long getNonce(); // optional uint64 orig_sequence_number = 11; /** * <code>optional uint64 orig_sequence_number = 11;</code> */ boolean hasOrigSequenceNumber(); /** * <code>optional uint64 orig_sequence_number = 11;</code> */ long getOrigSequenceNumber(); } /** * Protobuf type {@code hbase.pb.WALKey} * * <pre> * * Protocol buffer version of WALKey; see WALKey comment, not really a key but WALEdit header * for some KVs * </pre> */ public static final class WALKey extends com.google.protobuf.GeneratedMessage implements WALKeyOrBuilder { // Use WALKey.newBuilder() to construct. 
private WALKey(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private WALKey(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final WALKey defaultInstance; public static WALKey getDefaultInstance() { return defaultInstance; } public WALKey getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private WALKey( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; encodedRegionName_ = input.readBytes(); break; } case 18: { bitField0_ |= 0x00000002; tableName_ = input.readBytes(); break; } case 24: { bitField0_ |= 0x00000004; logSequenceNumber_ = input.readUInt64(); break; } case 32: { bitField0_ |= 0x00000008; writeTime_ = input.readUInt64(); break; } case 42: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder subBuilder = null; if (((bitField0_ & 0x00000010) == 0x00000010)) { subBuilder = clusterId_.toBuilder(); } clusterId_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(clusterId_); clusterId_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000010; break; } case 50: { if (!((mutable_bitField0_ & 
0x00000020) == 0x00000020)) { scopes_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope>(); mutable_bitField0_ |= 0x00000020; } scopes_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.PARSER, extensionRegistry)); break; } case 56: { bitField0_ |= 0x00000020; followingKvCount_ = input.readUInt32(); break; } case 66: { if (!((mutable_bitField0_ & 0x00000080) == 0x00000080)) { clusterIds_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID>(); mutable_bitField0_ |= 0x00000080; } clusterIds_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.PARSER, extensionRegistry)); break; } case 72: { bitField0_ |= 0x00000040; nonceGroup_ = input.readUInt64(); break; } case 80: { bitField0_ |= 0x00000080; nonce_ = input.readUInt64(); break; } case 88: { bitField0_ |= 0x00000100; origSequenceNumber_ = input.readUInt64(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) { scopes_ = java.util.Collections.unmodifiableList(scopes_); } if (((mutable_bitField0_ & 0x00000080) == 0x00000080)) { clusterIds_ = java.util.Collections.unmodifiableList(clusterIds_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_fieldAccessorTable .ensureFieldAccessorsInitialized( 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder.class); } public static com.google.protobuf.Parser<WALKey> PARSER = new com.google.protobuf.AbstractParser<WALKey>() { public WALKey parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new WALKey(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<WALKey> getParserForType() { return PARSER; } private int bitField0_; // required bytes encoded_region_name = 1; public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString encodedRegionName_; /** * <code>required bytes encoded_region_name = 1;</code> */ public boolean hasEncodedRegionName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes encoded_region_name = 1;</code> */ public com.google.protobuf.ByteString getEncodedRegionName() { return encodedRegionName_; } // required bytes table_name = 2; public static final int TABLE_NAME_FIELD_NUMBER = 2; private com.google.protobuf.ByteString tableName_; /** * <code>required bytes table_name = 2;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes table_name = 2;</code> */ public com.google.protobuf.ByteString getTableName() { return tableName_; } // required uint64 log_sequence_number = 3; public static final int LOG_SEQUENCE_NUMBER_FIELD_NUMBER = 3; private long logSequenceNumber_; /** * <code>required uint64 log_sequence_number = 3;</code> */ public boolean hasLogSequenceNumber() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>required uint64 log_sequence_number = 3;</code> */ public long getLogSequenceNumber() { return logSequenceNumber_; } // required uint64 write_time = 4; public static final int 
WRITE_TIME_FIELD_NUMBER = 4; private long writeTime_; /** * <code>required uint64 write_time = 4;</code> */ public boolean hasWriteTime() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>required uint64 write_time = 4;</code> */ public long getWriteTime() { return writeTime_; } // optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; public static final int CLUSTER_ID_FIELD_NUMBER = 5; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID clusterId_; /** * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code> * * <pre> * *This parameter is deprecated in favor of clusters which *contains the list of clusters that have consumed the change. *It is retained so that the log created by earlier releases (0.94) *can be read by the newer releases. * </pre> */ @java.lang.Deprecated public boolean hasClusterId() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code> * * <pre> * *This parameter is deprecated in favor of clusters which *contains the list of clusters that have consumed the change. *It is retained so that the log created by earlier releases (0.94) *can be read by the newer releases. * </pre> */ @java.lang.Deprecated public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID getClusterId() { return clusterId_; } /** * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code> * * <pre> * *This parameter is deprecated in favor of clusters which *contains the list of clusters that have consumed the change. *It is retained so that the log created by earlier releases (0.94) *can be read by the newer releases. 
* </pre> */ @java.lang.Deprecated public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdOrBuilder() { return clusterId_; } // repeated .hbase.pb.FamilyScope scopes = 6; public static final int SCOPES_FIELD_NUMBER = 6; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope> scopes_; /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope> getScopesList() { return scopes_; } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder> getScopesOrBuilderList() { return scopes_; } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public int getScopesCount() { return scopes_.size(); } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope getScopes(int index) { return scopes_.get(index); } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder getScopesOrBuilder( int index) { return scopes_.get(index); } // optional uint32 following_kv_count = 7; public static final int FOLLOWING_KV_COUNT_FIELD_NUMBER = 7; private int followingKvCount_; /** * <code>optional uint32 following_kv_count = 7;</code> */ public boolean hasFollowingKvCount() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>optional uint32 following_kv_count = 7;</code> */ public int getFollowingKvCount() { return followingKvCount_; } // repeated .hbase.pb.UUID cluster_ids = 8; public static final int CLUSTER_IDS_FIELD_NUMBER = 8; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID> clusterIds_; /** * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> * * <pre> * *This field contains the list of clusters that 
have *consumed the change * </pre> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID> getClusterIdsList() { return clusterIds_; } /** * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> * * <pre> * *This field contains the list of clusters that have *consumed the change * </pre> */ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder> getClusterIdsOrBuilderList() { return clusterIds_; } /** * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> * * <pre> * *This field contains the list of clusters that have *consumed the change * </pre> */ public int getClusterIdsCount() { return clusterIds_.size(); } /** * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> * * <pre> * *This field contains the list of clusters that have *consumed the change * </pre> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID getClusterIds(int index) { return clusterIds_.get(index); } /** * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> * * <pre> * *This field contains the list of clusters that have *consumed the change * </pre> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdsOrBuilder( int index) { return clusterIds_.get(index); } // optional uint64 nonceGroup = 9; public static final int NONCEGROUP_FIELD_NUMBER = 9; private long nonceGroup_; /** * <code>optional uint64 nonceGroup = 9;</code> */ public boolean hasNonceGroup() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <code>optional uint64 nonceGroup = 9;</code> */ public long getNonceGroup() { return nonceGroup_; } // optional uint64 nonce = 10; public static final int NONCE_FIELD_NUMBER = 10; private long nonce_; /** * <code>optional uint64 nonce = 10;</code> */ public boolean hasNonce() { return ((bitField0_ & 0x00000080) == 0x00000080); } /** * <code>optional uint64 nonce = 10;</code> */ public long getNonce() { return nonce_; } // optional uint64 
orig_sequence_number = 11; public static final int ORIG_SEQUENCE_NUMBER_FIELD_NUMBER = 11; private long origSequenceNumber_; /** * <code>optional uint64 orig_sequence_number = 11;</code> */ public boolean hasOrigSequenceNumber() { return ((bitField0_ & 0x00000100) == 0x00000100); } /** * <code>optional uint64 orig_sequence_number = 11;</code> */ public long getOrigSequenceNumber() { return origSequenceNumber_; } private void initFields() { encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; tableName_ = com.google.protobuf.ByteString.EMPTY; logSequenceNumber_ = 0L; writeTime_ = 0L; clusterId_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance(); scopes_ = java.util.Collections.emptyList(); followingKvCount_ = 0; clusterIds_ = java.util.Collections.emptyList(); nonceGroup_ = 0L; nonce_ = 0L; origSequenceNumber_ = 0L; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasEncodedRegionName()) { memoizedIsInitialized = 0; return false; } if (!hasTableName()) { memoizedIsInitialized = 0; return false; } if (!hasLogSequenceNumber()) { memoizedIsInitialized = 0; return false; } if (!hasWriteTime()) { memoizedIsInitialized = 0; return false; } if (hasClusterId()) { if (!getClusterId().isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getScopesCount(); i++) { if (!getScopes(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } for (int i = 0; i < getClusterIdsCount(); i++) { if (!getClusterIds(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, encodedRegionName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { 
output.writeBytes(2, tableName_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeUInt64(3, logSequenceNumber_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeUInt64(4, writeTime_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeMessage(5, clusterId_); } for (int i = 0; i < scopes_.size(); i++) { output.writeMessage(6, scopes_.get(i)); } if (((bitField0_ & 0x00000020) == 0x00000020)) { output.writeUInt32(7, followingKvCount_); } for (int i = 0; i < clusterIds_.size(); i++) { output.writeMessage(8, clusterIds_.get(i)); } if (((bitField0_ & 0x00000040) == 0x00000040)) { output.writeUInt64(9, nonceGroup_); } if (((bitField0_ & 0x00000080) == 0x00000080)) { output.writeUInt64(10, nonce_); } if (((bitField0_ & 0x00000100) == 0x00000100)) { output.writeUInt64(11, origSequenceNumber_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, encodedRegionName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, tableName_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(3, logSequenceNumber_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(4, writeTime_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(5, clusterId_); } for (int i = 0; i < scopes_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(6, scopes_.get(i)); } if (((bitField0_ & 0x00000020) == 0x00000020)) { size += com.google.protobuf.CodedOutputStream .computeUInt32Size(7, followingKvCount_); } for (int i = 0; i < 
clusterIds_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(8, clusterIds_.get(i)); } if (((bitField0_ & 0x00000040) == 0x00000040)) { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(9, nonceGroup_); } if (((bitField0_ & 0x00000080) == 0x00000080)) { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(10, nonce_); } if (((bitField0_ & 0x00000100) == 0x00000100)) { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(11, origSequenceNumber_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey) obj; boolean result = true; result = result && (hasEncodedRegionName() == other.hasEncodedRegionName()); if (hasEncodedRegionName()) { result = result && getEncodedRegionName() .equals(other.getEncodedRegionName()); } result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { result = result && getTableName() .equals(other.getTableName()); } result = result && (hasLogSequenceNumber() == other.hasLogSequenceNumber()); if (hasLogSequenceNumber()) { result = result && (getLogSequenceNumber() == other.getLogSequenceNumber()); } result = result && (hasWriteTime() == other.hasWriteTime()); if (hasWriteTime()) { result = result && (getWriteTime() == other.getWriteTime()); } result = result && (hasClusterId() == other.hasClusterId()); if (hasClusterId()) { result = result && getClusterId() .equals(other.getClusterId()); } 
result = result && getScopesList() .equals(other.getScopesList()); result = result && (hasFollowingKvCount() == other.hasFollowingKvCount()); if (hasFollowingKvCount()) { result = result && (getFollowingKvCount() == other.getFollowingKvCount()); } result = result && getClusterIdsList() .equals(other.getClusterIdsList()); result = result && (hasNonceGroup() == other.hasNonceGroup()); if (hasNonceGroup()) { result = result && (getNonceGroup() == other.getNonceGroup()); } result = result && (hasNonce() == other.hasNonce()); if (hasNonce()) { result = result && (getNonce() == other.getNonce()); } result = result && (hasOrigSequenceNumber() == other.hasOrigSequenceNumber()); if (hasOrigSequenceNumber()) { result = result && (getOrigSequenceNumber() == other.getOrigSequenceNumber()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasEncodedRegionName()) { hash = (37 * hash) + ENCODED_REGION_NAME_FIELD_NUMBER; hash = (53 * hash) + getEncodedRegionName().hashCode(); } if (hasTableName()) { hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } if (hasLogSequenceNumber()) { hash = (37 * hash) + LOG_SEQUENCE_NUMBER_FIELD_NUMBER; hash = (53 * hash) + hashLong(getLogSequenceNumber()); } if (hasWriteTime()) { hash = (37 * hash) + WRITE_TIME_FIELD_NUMBER; hash = (53 * hash) + hashLong(getWriteTime()); } if (hasClusterId()) { hash = (37 * hash) + CLUSTER_ID_FIELD_NUMBER; hash = (53 * hash) + getClusterId().hashCode(); } if (getScopesCount() > 0) { hash = (37 * hash) + SCOPES_FIELD_NUMBER; hash = (53 * hash) + getScopesList().hashCode(); } if (hasFollowingKvCount()) { hash = (37 * hash) + FOLLOWING_KV_COUNT_FIELD_NUMBER; hash = (53 * hash) + getFollowingKvCount(); } if (getClusterIdsCount() > 
0) { hash = (37 * hash) + CLUSTER_IDS_FIELD_NUMBER; hash = (53 * hash) + getClusterIdsList().hashCode(); } if (hasNonceGroup()) { hash = (37 * hash) + NONCEGROUP_FIELD_NUMBER; hash = (53 * hash) + hashLong(getNonceGroup()); } if (hasNonce()) { hash = (37 * hash) + NONCE_FIELD_NUMBER; hash = (53 * hash) + hashLong(getNonce()); } if (hasOrigSequenceNumber()) { hash = (37 * hash) + ORIG_SEQUENCE_NUMBER_FIELD_NUMBER; hash = (53 * hash) + hashLong(getOrigSequenceNumber()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.WALKey} * * <pre> * * Protocol buffer version of WALKey; see WALKey comment, not really a key but WALEdit header * for some KVs * </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKeyOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_descriptor; } protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getClusterIdFieldBuilder(); getScopesFieldBuilder(); getClusterIdsFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); tableName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); logSequenceNumber_ = 0L; bitField0_ = (bitField0_ & ~0x00000004); writeTime_ = 0L; bitField0_ = (bitField0_ & ~0x00000008); if (clusterIdBuilder_ == null) { clusterId_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance(); } else { clusterIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000010); if (scopesBuilder_ == null) { scopes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000020); } else { scopesBuilder_.clear(); } followingKvCount_ = 0; bitField0_ = (bitField0_ & ~0x00000040); if (clusterIdsBuilder_ == null) { clusterIds_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000080); } else { clusterIdsBuilder_.clear(); } nonceGroup_ = 0L; bitField0_ = (bitField0_ & ~0x00000100); nonce_ = 0L; bitField0_ = (bitField0_ & ~0x00000200); origSequenceNumber_ = 0L; 
bitField0_ = (bitField0_ & ~0x00000400); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey build() { org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey buildPartial() { org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.encodedRegionName_ = encodedRegionName_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.tableName_ = tableName_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.logSequenceNumber_ = logSequenceNumber_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.writeTime_ = writeTime_; if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; } if (clusterIdBuilder_ == null) { result.clusterId_ = clusterId_; } else { result.clusterId_ = clusterIdBuilder_.build(); } if (scopesBuilder_ == null) { if (((bitField0_ & 0x00000020) == 0x00000020)) { scopes_ = java.util.Collections.unmodifiableList(scopes_); bitField0_ = (bitField0_ & ~0x00000020); } result.scopes_ = scopes_; } else { result.scopes_ = 
scopesBuilder_.build(); } if (((from_bitField0_ & 0x00000040) == 0x00000040)) { to_bitField0_ |= 0x00000020; } result.followingKvCount_ = followingKvCount_; if (clusterIdsBuilder_ == null) { if (((bitField0_ & 0x00000080) == 0x00000080)) { clusterIds_ = java.util.Collections.unmodifiableList(clusterIds_); bitField0_ = (bitField0_ & ~0x00000080); } result.clusterIds_ = clusterIds_; } else { result.clusterIds_ = clusterIdsBuilder_.build(); } if (((from_bitField0_ & 0x00000100) == 0x00000100)) { to_bitField0_ |= 0x00000040; } result.nonceGroup_ = nonceGroup_; if (((from_bitField0_ & 0x00000200) == 0x00000200)) { to_bitField0_ |= 0x00000080; } result.nonce_ = nonce_; if (((from_bitField0_ & 0x00000400) == 0x00000400)) { to_bitField0_ |= 0x00000100; } result.origSequenceNumber_ = origSequenceNumber_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey other) { if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance()) return this; if (other.hasEncodedRegionName()) { setEncodedRegionName(other.getEncodedRegionName()); } if (other.hasTableName()) { setTableName(other.getTableName()); } if (other.hasLogSequenceNumber()) { setLogSequenceNumber(other.getLogSequenceNumber()); } if (other.hasWriteTime()) { setWriteTime(other.getWriteTime()); } if (other.hasClusterId()) { mergeClusterId(other.getClusterId()); } if (scopesBuilder_ == null) { if (!other.scopes_.isEmpty()) { if (scopes_.isEmpty()) { scopes_ = other.scopes_; bitField0_ = (bitField0_ & ~0x00000020); } else { ensureScopesIsMutable(); scopes_.addAll(other.scopes_); } onChanged(); } } else { if 
(!other.scopes_.isEmpty()) { if (scopesBuilder_.isEmpty()) { scopesBuilder_.dispose(); scopesBuilder_ = null; scopes_ = other.scopes_; bitField0_ = (bitField0_ & ~0x00000020); scopesBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? getScopesFieldBuilder() : null; } else { scopesBuilder_.addAllMessages(other.scopes_); } } } if (other.hasFollowingKvCount()) { setFollowingKvCount(other.getFollowingKvCount()); } if (clusterIdsBuilder_ == null) { if (!other.clusterIds_.isEmpty()) { if (clusterIds_.isEmpty()) { clusterIds_ = other.clusterIds_; bitField0_ = (bitField0_ & ~0x00000080); } else { ensureClusterIdsIsMutable(); clusterIds_.addAll(other.clusterIds_); } onChanged(); } } else { if (!other.clusterIds_.isEmpty()) { if (clusterIdsBuilder_.isEmpty()) { clusterIdsBuilder_.dispose(); clusterIdsBuilder_ = null; clusterIds_ = other.clusterIds_; bitField0_ = (bitField0_ & ~0x00000080); clusterIdsBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? getClusterIdsFieldBuilder() : null; } else { clusterIdsBuilder_.addAllMessages(other.clusterIds_); } } } if (other.hasNonceGroup()) { setNonceGroup(other.getNonceGroup()); } if (other.hasNonce()) { setNonce(other.getNonce()); } if (other.hasOrigSequenceNumber()) { setOrigSequenceNumber(other.getOrigSequenceNumber()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasEncodedRegionName()) { return false; } if (!hasTableName()) { return false; } if (!hasLogSequenceNumber()) { return false; } if (!hasWriteTime()) { return false; } if (hasClusterId()) { if (!getClusterId().isInitialized()) { return false; } } for (int i = 0; i < getScopesCount(); i++) { if (!getScopes(i).isInitialized()) { return false; } } for (int i = 0; i < getClusterIdsCount(); i++) { if (!getClusterIds(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required bytes encoded_region_name = 1; private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes encoded_region_name = 1;</code> */ public boolean hasEncodedRegionName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes encoded_region_name = 1;</code> */ public com.google.protobuf.ByteString getEncodedRegionName() { return encodedRegionName_; } /** * <code>required bytes encoded_region_name = 1;</code> */ public Builder setEncodedRegionName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; encodedRegionName_ = value; onChanged(); return this; } /** * <code>required bytes encoded_region_name = 1;</code> */ public Builder clearEncodedRegionName() { bitField0_ = (bitField0_ & ~0x00000001); encodedRegionName_ = getDefaultInstance().getEncodedRegionName(); onChanged(); return this; } // required bytes table_name = 2; private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes table_name = 2;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes table_name = 2;</code> */ public com.google.protobuf.ByteString getTableName() { return tableName_; } /** * <code>required bytes table_name = 2;</code> */ public Builder setTableName(com.google.protobuf.ByteString 
value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; tableName_ = value; onChanged(); return this; } /** * <code>required bytes table_name = 2;</code> */ public Builder clearTableName() { bitField0_ = (bitField0_ & ~0x00000002); tableName_ = getDefaultInstance().getTableName(); onChanged(); return this; } // required uint64 log_sequence_number = 3; private long logSequenceNumber_ ; /** * <code>required uint64 log_sequence_number = 3;</code> */ public boolean hasLogSequenceNumber() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>required uint64 log_sequence_number = 3;</code> */ public long getLogSequenceNumber() { return logSequenceNumber_; } /** * <code>required uint64 log_sequence_number = 3;</code> */ public Builder setLogSequenceNumber(long value) { bitField0_ |= 0x00000004; logSequenceNumber_ = value; onChanged(); return this; } /** * <code>required uint64 log_sequence_number = 3;</code> */ public Builder clearLogSequenceNumber() { bitField0_ = (bitField0_ & ~0x00000004); logSequenceNumber_ = 0L; onChanged(); return this; } // required uint64 write_time = 4; private long writeTime_ ; /** * <code>required uint64 write_time = 4;</code> */ public boolean hasWriteTime() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>required uint64 write_time = 4;</code> */ public long getWriteTime() { return writeTime_; } /** * <code>required uint64 write_time = 4;</code> */ public Builder setWriteTime(long value) { bitField0_ |= 0x00000008; writeTime_ = value; onChanged(); return this; } /** * <code>required uint64 write_time = 4;</code> */ public Builder clearWriteTime() { bitField0_ = (bitField0_ & ~0x00000008); writeTime_ = 0L; onChanged(); return this; } // optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID clusterId_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance(); private 
com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder> clusterIdBuilder_; /** * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code> * * <pre> * *This parameter is deprecated in favor of clusters which *contains the list of clusters that have consumed the change. *It is retained so that the log created by earlier releases (0.94) *can be read by the newer releases. * </pre> */ @java.lang.Deprecated public boolean hasClusterId() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code> * * <pre> * *This parameter is deprecated in favor of clusters which *contains the list of clusters that have consumed the change. *It is retained so that the log created by earlier releases (0.94) *can be read by the newer releases. * </pre> */ @java.lang.Deprecated public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID getClusterId() { if (clusterIdBuilder_ == null) { return clusterId_; } else { return clusterIdBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code> * * <pre> * *This parameter is deprecated in favor of clusters which *contains the list of clusters that have consumed the change. *It is retained so that the log created by earlier releases (0.94) *can be read by the newer releases. 
* </pre> */ @java.lang.Deprecated public Builder setClusterId(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID value) { if (clusterIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } clusterId_ = value; onChanged(); } else { clusterIdBuilder_.setMessage(value); } bitField0_ |= 0x00000010; return this; } /** * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code> * * <pre> * *This parameter is deprecated in favor of clusters which *contains the list of clusters that have consumed the change. *It is retained so that the log created by earlier releases (0.94) *can be read by the newer releases. * </pre> */ @java.lang.Deprecated public Builder setClusterId( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder builderForValue) { if (clusterIdBuilder_ == null) { clusterId_ = builderForValue.build(); onChanged(); } else { clusterIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000010; return this; } /** * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code> * * <pre> * *This parameter is deprecated in favor of clusters which *contains the list of clusters that have consumed the change. *It is retained so that the log created by earlier releases (0.94) *can be read by the newer releases. 
* </pre> */ @java.lang.Deprecated public Builder mergeClusterId(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID value) { if (clusterIdBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010) && clusterId_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance()) { clusterId_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.newBuilder(clusterId_).mergeFrom(value).buildPartial(); } else { clusterId_ = value; } onChanged(); } else { clusterIdBuilder_.mergeFrom(value); } bitField0_ |= 0x00000010; return this; } /** * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code> * * <pre> * *This parameter is deprecated in favor of clusters which *contains the list of clusters that have consumed the change. *It is retained so that the log created by earlier releases (0.94) *can be read by the newer releases. * </pre> */ @java.lang.Deprecated public Builder clearClusterId() { if (clusterIdBuilder_ == null) { clusterId_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance(); onChanged(); } else { clusterIdBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000010); return this; } /** * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code> * * <pre> * *This parameter is deprecated in favor of clusters which *contains the list of clusters that have consumed the change. *It is retained so that the log created by earlier releases (0.94) *can be read by the newer releases. * </pre> */ @java.lang.Deprecated public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder getClusterIdBuilder() { bitField0_ |= 0x00000010; onChanged(); return getClusterIdFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code> * * <pre> * *This parameter is deprecated in favor of clusters which *contains the list of clusters that have consumed the change. 
*It is retained so that the log created by earlier releases (0.94) *can be read by the newer releases. * </pre> */ @java.lang.Deprecated public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdOrBuilder() { if (clusterIdBuilder_ != null) { return clusterIdBuilder_.getMessageOrBuilder(); } else { return clusterId_; } } /** * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code> * * <pre> * *This parameter is deprecated in favor of clusters which *contains the list of clusters that have consumed the change. *It is retained so that the log created by earlier releases (0.94) *can be read by the newer releases. * </pre> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder> getClusterIdFieldBuilder() { if (clusterIdBuilder_ == null) { clusterIdBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder>( clusterId_, getParentForChildren(), isClean()); clusterId_ = null; } return clusterIdBuilder_; } // repeated .hbase.pb.FamilyScope scopes = 6; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope> scopes_ = java.util.Collections.emptyList(); private void ensureScopesIsMutable() { if (!((bitField0_ & 0x00000020) == 0x00000020)) { scopes_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope>(scopes_); bitField0_ |= 0x00000020; } } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder, 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder> scopesBuilder_; /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope> getScopesList() { if (scopesBuilder_ == null) { return java.util.Collections.unmodifiableList(scopes_); } else { return scopesBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public int getScopesCount() { if (scopesBuilder_ == null) { return scopes_.size(); } else { return scopesBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope getScopes(int index) { if (scopesBuilder_ == null) { return scopes_.get(index); } else { return scopesBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public Builder setScopes( int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope value) { if (scopesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureScopesIsMutable(); scopes_.set(index, value); onChanged(); } else { scopesBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public Builder setScopes( int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder builderForValue) { if (scopesBuilder_ == null) { ensureScopesIsMutable(); scopes_.set(index, builderForValue.build()); onChanged(); } else { scopesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public Builder addScopes(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope value) { if (scopesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureScopesIsMutable(); scopes_.add(value); onChanged(); } else { 
scopesBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public Builder addScopes( int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope value) { if (scopesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureScopesIsMutable(); scopes_.add(index, value); onChanged(); } else { scopesBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public Builder addScopes( org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder builderForValue) { if (scopesBuilder_ == null) { ensureScopesIsMutable(); scopes_.add(builderForValue.build()); onChanged(); } else { scopesBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public Builder addScopes( int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder builderForValue) { if (scopesBuilder_ == null) { ensureScopesIsMutable(); scopes_.add(index, builderForValue.build()); onChanged(); } else { scopesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public Builder addAllScopes( java.lang.Iterable<? 
extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope> values) { if (scopesBuilder_ == null) { ensureScopesIsMutable(); super.addAll(values, scopes_); onChanged(); } else { scopesBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public Builder clearScopes() { if (scopesBuilder_ == null) { scopes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000020); onChanged(); } else { scopesBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public Builder removeScopes(int index) { if (scopesBuilder_ == null) { ensureScopesIsMutable(); scopes_.remove(index); onChanged(); } else { scopesBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder getScopesBuilder( int index) { return getScopesFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder getScopesOrBuilder( int index) { if (scopesBuilder_ == null) { return scopes_.get(index); } else { return scopesBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder> getScopesOrBuilderList() { if (scopesBuilder_ != null) { return scopesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(scopes_); } } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder addScopesBuilder() { return getScopesFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.getDefaultInstance()); } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder addScopesBuilder( int index) { return getScopesFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.getDefaultInstance()); } /** * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder> getScopesBuilderList() { return getScopesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder> getScopesFieldBuilder() { if (scopesBuilder_ == null) { scopesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder>( scopes_, ((bitField0_ & 0x00000020) == 0x00000020), getParentForChildren(), isClean()); scopes_ = null; } return scopesBuilder_; } // optional uint32 following_kv_count = 7; private int followingKvCount_ ; /** * <code>optional uint32 following_kv_count = 7;</code> */ public boolean 
hasFollowingKvCount() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <code>optional uint32 following_kv_count = 7;</code> */ public int getFollowingKvCount() { return followingKvCount_; } /** * <code>optional uint32 following_kv_count = 7;</code> */ public Builder setFollowingKvCount(int value) { bitField0_ |= 0x00000040; followingKvCount_ = value; onChanged(); return this; } /** * <code>optional uint32 following_kv_count = 7;</code> */ public Builder clearFollowingKvCount() { bitField0_ = (bitField0_ & ~0x00000040); followingKvCount_ = 0; onChanged(); return this; } // repeated .hbase.pb.UUID cluster_ids = 8; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID> clusterIds_ = java.util.Collections.emptyList(); private void ensureClusterIdsIsMutable() { if (!((bitField0_ & 0x00000080) == 0x00000080)) { clusterIds_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID>(clusterIds_); bitField0_ |= 0x00000080; } } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder> clusterIdsBuilder_; /** * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> * * <pre> * *This field contains the list of clusters that have *consumed the change * </pre> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID> getClusterIdsList() { if (clusterIdsBuilder_ == null) { return java.util.Collections.unmodifiableList(clusterIds_); } else { return clusterIdsBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> * * <pre> * *This field contains the list of clusters that have *consumed the change * </pre> */ public int getClusterIdsCount() { if (clusterIdsBuilder_ == null) { return clusterIds_.size(); } else { return clusterIdsBuilder_.getCount(); } } /** * 
<code>repeated .hbase.pb.UUID cluster_ids = 8;</code> * * <pre> * *This field contains the list of clusters that have *consumed the change * </pre> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID getClusterIds(int index) { if (clusterIdsBuilder_ == null) { return clusterIds_.get(index); } else { return clusterIdsBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> * * <pre> * *This field contains the list of clusters that have *consumed the change * </pre> */ public Builder setClusterIds( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID value) { if (clusterIdsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureClusterIdsIsMutable(); clusterIds_.set(index, value); onChanged(); } else { clusterIdsBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> * * <pre> * *This field contains the list of clusters that have *consumed the change * </pre> */ public Builder setClusterIds( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder builderForValue) { if (clusterIdsBuilder_ == null) { ensureClusterIdsIsMutable(); clusterIds_.set(index, builderForValue.build()); onChanged(); } else { clusterIdsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> * * <pre> * *This field contains the list of clusters that have *consumed the change * </pre> */ public Builder addClusterIds(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID value) { if (clusterIdsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureClusterIdsIsMutable(); clusterIds_.add(value); onChanged(); } else { clusterIdsBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> * * <pre> * *This field contains the list of clusters that have *consumed the change * </pre> 
*/ public Builder addClusterIds( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID value) { if (clusterIdsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureClusterIdsIsMutable(); clusterIds_.add(index, value); onChanged(); } else { clusterIdsBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> * * <pre> * *This field contains the list of clusters that have *consumed the change * </pre> */ public Builder addClusterIds( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder builderForValue) { if (clusterIdsBuilder_ == null) { ensureClusterIdsIsMutable(); clusterIds_.add(builderForValue.build()); onChanged(); } else { clusterIdsBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> * * <pre> * *This field contains the list of clusters that have *consumed the change * </pre> */ public Builder addClusterIds( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder builderForValue) { if (clusterIdsBuilder_ == null) { ensureClusterIdsIsMutable(); clusterIds_.add(index, builderForValue.build()); onChanged(); } else { clusterIdsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> * * <pre> * *This field contains the list of clusters that have *consumed the change * </pre> */ public Builder addAllClusterIds( java.lang.Iterable<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID> values) { if (clusterIdsBuilder_ == null) { ensureClusterIdsIsMutable(); super.addAll(values, clusterIds_); onChanged(); } else { clusterIdsBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> * * <pre> * *This field contains the list of clusters that have *consumed the change * </pre> */ public Builder clearClusterIds() { if (clusterIdsBuilder_ == null) { clusterIds_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000080); onChanged(); } else { clusterIdsBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> * * <pre> * *This field contains the list of clusters that have *consumed the change * </pre> */ public Builder removeClusterIds(int index) { if (clusterIdsBuilder_ == null) { ensureClusterIdsIsMutable(); clusterIds_.remove(index); onChanged(); } else { clusterIdsBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> * * <pre> * *This field contains the list of clusters that have *consumed the change * </pre> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder getClusterIdsBuilder( int index) { return getClusterIdsFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> * * <pre> * *This field contains the list of clusters that have *consumed the change * </pre> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdsOrBuilder( int index) { if (clusterIdsBuilder_ == null) { return clusterIds_.get(index); } else { return clusterIdsBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> * * <pre> * *This field contains the list of clusters that have *consumed the change * </pre> */ public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder> getClusterIdsOrBuilderList() { if (clusterIdsBuilder_ != null) { return clusterIdsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(clusterIds_); } } /** * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> * * <pre> * *This field contains the list of clusters that have *consumed the change * </pre> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder addClusterIdsBuilder() { return getClusterIdsFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance()); } /** * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> * * <pre> * *This field contains the list of clusters that have *consumed the change * </pre> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder addClusterIdsBuilder( int index) { return getClusterIdsFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance()); } /** * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code> * * <pre> * *This field contains the list of clusters that have *consumed the change * </pre> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder> getClusterIdsBuilderList() { return getClusterIdsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder> getClusterIdsFieldBuilder() { if (clusterIdsBuilder_ == null) { clusterIdsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder>( clusterIds_, 
((bitField0_ & 0x00000080) == 0x00000080), getParentForChildren(), isClean()); clusterIds_ = null; } return clusterIdsBuilder_; } // optional uint64 nonceGroup = 9; private long nonceGroup_ ; /** * <code>optional uint64 nonceGroup = 9;</code> */ public boolean hasNonceGroup() { return ((bitField0_ & 0x00000100) == 0x00000100); } /** * <code>optional uint64 nonceGroup = 9;</code> */ public long getNonceGroup() { return nonceGroup_; } /** * <code>optional uint64 nonceGroup = 9;</code> */ public Builder setNonceGroup(long value) { bitField0_ |= 0x00000100; nonceGroup_ = value; onChanged(); return this; } /** * <code>optional uint64 nonceGroup = 9;</code> */ public Builder clearNonceGroup() { bitField0_ = (bitField0_ & ~0x00000100); nonceGroup_ = 0L; onChanged(); return this; } // optional uint64 nonce = 10; private long nonce_ ; /** * <code>optional uint64 nonce = 10;</code> */ public boolean hasNonce() { return ((bitField0_ & 0x00000200) == 0x00000200); } /** * <code>optional uint64 nonce = 10;</code> */ public long getNonce() { return nonce_; } /** * <code>optional uint64 nonce = 10;</code> */ public Builder setNonce(long value) { bitField0_ |= 0x00000200; nonce_ = value; onChanged(); return this; } /** * <code>optional uint64 nonce = 10;</code> */ public Builder clearNonce() { bitField0_ = (bitField0_ & ~0x00000200); nonce_ = 0L; onChanged(); return this; } // optional uint64 orig_sequence_number = 11; private long origSequenceNumber_ ; /** * <code>optional uint64 orig_sequence_number = 11;</code> */ public boolean hasOrigSequenceNumber() { return ((bitField0_ & 0x00000400) == 0x00000400); } /** * <code>optional uint64 orig_sequence_number = 11;</code> */ public long getOrigSequenceNumber() { return origSequenceNumber_; } /** * <code>optional uint64 orig_sequence_number = 11;</code> */ public Builder setOrigSequenceNumber(long value) { bitField0_ |= 0x00000400; origSequenceNumber_ = value; onChanged(); return this; } /** * <code>optional uint64 orig_sequence_number 
= 11;</code>
       */
      public Builder clearOrigSequenceNumber() {
        // Drops presence bit 0x00000400 and restores the field's default (0).
        bitField0_ = (bitField0_ & ~0x00000400);
        origSequenceNumber_ = 0L;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:hbase.pb.WALKey)
    }

    static {
      defaultInstance = new WALKey(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:hbase.pb.WALKey)
  }

  // NOTE(review): this file is protoc-generated (see the @@protoc_insertion_point
  // markers above) — do not hand-edit; regenerate from the .proto definition instead.
  //
  // Read-only accessor contract for the hbase.pb.FamilyScope message: for each
  // proto2 `required` field there is a hasXxx() presence check plus a getXxx()
  // value accessor.
  public interface FamilyScopeOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bytes family = 1;
    /**
     * <code>required bytes family = 1;</code>
     */
    boolean hasFamily();
    /**
     * <code>required bytes family = 1;</code>
     */
    com.google.protobuf.ByteString getFamily();

    // required .hbase.pb.ScopeType scope_type = 2;
    /**
     * <code>required .hbase.pb.ScopeType scope_type = 2;</code>
     */
    boolean hasScopeType();
    /**
     * <code>required .hbase.pb.ScopeType scope_type = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType getScopeType();
  }
  /**
   * Protobuf type {@code hbase.pb.FamilyScope}
   */
  public static final class FamilyScope extends
      com.google.protobuf.GeneratedMessage
      implements FamilyScopeOrBuilder {
    // Use FamilyScope.newBuilder() to construct.
private FamilyScope(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private FamilyScope(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final FamilyScope defaultInstance; public static FamilyScope getDefaultInstance() { return defaultInstance; } public FamilyScope getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private FamilyScope( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; family_ = input.readBytes(); break; } case 16: { int rawValue = input.readEnum(); org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType value = org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(2, rawValue); } else { bitField0_ |= 0x00000002; scopeType_ = value; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final 
com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_FamilyScope_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_FamilyScope_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder.class); } public static com.google.protobuf.Parser<FamilyScope> PARSER = new com.google.protobuf.AbstractParser<FamilyScope>() { public FamilyScope parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new FamilyScope(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<FamilyScope> getParserForType() { return PARSER; } private int bitField0_; // required bytes family = 1; public static final int FAMILY_FIELD_NUMBER = 1; private com.google.protobuf.ByteString family_; /** * <code>required bytes family = 1;</code> */ public boolean hasFamily() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes family = 1;</code> */ public com.google.protobuf.ByteString getFamily() { return family_; } // required .hbase.pb.ScopeType scope_type = 2; public static final int SCOPE_TYPE_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType scopeType_; /** * <code>required .hbase.pb.ScopeType scope_type = 2;</code> */ public boolean hasScopeType() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required .hbase.pb.ScopeType scope_type = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType getScopeType() { return scopeType_; } private void initFields() { 
family_ = com.google.protobuf.ByteString.EMPTY; scopeType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType.REPLICATION_SCOPE_LOCAL; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasFamily()) { memoizedIsInitialized = 0; return false; } if (!hasScopeType()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, family_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeEnum(2, scopeType_.getNumber()); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, family_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(2, scopeType_.getNumber()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope) obj; boolean result = true; result = result && (hasFamily() == other.hasFamily()); if (hasFamily()) { result = 
result && getFamily() .equals(other.getFamily()); } result = result && (hasScopeType() == other.hasScopeType()); if (hasScopeType()) { result = result && (getScopeType() == other.getScopeType()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasFamily()) { hash = (37 * hash) + FAMILY_FIELD_NUMBER; hash = (53 * hash) + getFamily().hashCode(); } if (hasScopeType()) { hash = (37 * hash) + SCOPE_TYPE_FIELD_NUMBER; hash = (53 * hash) + hashEnum(getScopeType()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.FamilyScope} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder { public static final 
com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_FamilyScope_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_FamilyScope_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); family_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); scopeType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType.REPLICATION_SCOPE_LOCAL; bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_FamilyScope_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope build() { org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope result = buildPartial(); if (!result.isInitialized()) { throw 
newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope buildPartial() { org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.family_ = family_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.scopeType_ = scopeType_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope other) { if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.getDefaultInstance()) return this; if (other.hasFamily()) { setFamily(other.getFamily()); } if (other.hasScopeType()) { setScopeType(other.getScopeType()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasFamily()) { return false; } if (!hasScopeType()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope) e.getUnfinishedMessage(); throw e; } finally { if 
(parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required bytes family = 1; private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes family = 1;</code> */ public boolean hasFamily() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes family = 1;</code> */ public com.google.protobuf.ByteString getFamily() { return family_; } /** * <code>required bytes family = 1;</code> */ public Builder setFamily(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; family_ = value; onChanged(); return this; } /** * <code>required bytes family = 1;</code> */ public Builder clearFamily() { bitField0_ = (bitField0_ & ~0x00000001); family_ = getDefaultInstance().getFamily(); onChanged(); return this; } // required .hbase.pb.ScopeType scope_type = 2; private org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType scopeType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType.REPLICATION_SCOPE_LOCAL; /** * <code>required .hbase.pb.ScopeType scope_type = 2;</code> */ public boolean hasScopeType() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required .hbase.pb.ScopeType scope_type = 2;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType getScopeType() { return scopeType_; } /** * <code>required .hbase.pb.ScopeType scope_type = 2;</code> */ public Builder setScopeType(org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; scopeType_ = value; onChanged(); return this; } /** * <code>required .hbase.pb.ScopeType scope_type = 2;</code> */ public Builder clearScopeType() { bitField0_ = (bitField0_ & ~0x00000002); scopeType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType.REPLICATION_SCOPE_LOCAL; onChanged(); 
return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.FamilyScope) } static { defaultInstance = new FamilyScope(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.FamilyScope) } public interface CompactionDescriptorOrBuilder extends com.google.protobuf.MessageOrBuilder { // required bytes table_name = 1; /** * <code>required bytes table_name = 1;</code> * * <pre> * TODO: WALKey already stores these, might remove * </pre> */ boolean hasTableName(); /** * <code>required bytes table_name = 1;</code> * * <pre> * TODO: WALKey already stores these, might remove * </pre> */ com.google.protobuf.ByteString getTableName(); // required bytes encoded_region_name = 2; /** * <code>required bytes encoded_region_name = 2;</code> */ boolean hasEncodedRegionName(); /** * <code>required bytes encoded_region_name = 2;</code> */ com.google.protobuf.ByteString getEncodedRegionName(); // required bytes family_name = 3; /** * <code>required bytes family_name = 3;</code> */ boolean hasFamilyName(); /** * <code>required bytes family_name = 3;</code> */ com.google.protobuf.ByteString getFamilyName(); // repeated string compaction_input = 4; /** * <code>repeated string compaction_input = 4;</code> * * <pre> * relative to store dir * </pre> */ java.util.List<java.lang.String> getCompactionInputList(); /** * <code>repeated string compaction_input = 4;</code> * * <pre> * relative to store dir * </pre> */ int getCompactionInputCount(); /** * <code>repeated string compaction_input = 4;</code> * * <pre> * relative to store dir * </pre> */ java.lang.String getCompactionInput(int index); /** * <code>repeated string compaction_input = 4;</code> * * <pre> * relative to store dir * </pre> */ com.google.protobuf.ByteString getCompactionInputBytes(int index); // repeated string compaction_output = 5; /** * <code>repeated string compaction_output = 5;</code> */ java.util.List<java.lang.String> getCompactionOutputList(); /** * <code>repeated string 
compaction_output = 5;</code> */ int getCompactionOutputCount(); /** * <code>repeated string compaction_output = 5;</code> */ java.lang.String getCompactionOutput(int index); /** * <code>repeated string compaction_output = 5;</code> */ com.google.protobuf.ByteString getCompactionOutputBytes(int index); // required string store_home_dir = 6; /** * <code>required string store_home_dir = 6;</code> * * <pre> * relative to region dir * </pre> */ boolean hasStoreHomeDir(); /** * <code>required string store_home_dir = 6;</code> * * <pre> * relative to region dir * </pre> */ java.lang.String getStoreHomeDir(); /** * <code>required string store_home_dir = 6;</code> * * <pre> * relative to region dir * </pre> */ com.google.protobuf.ByteString getStoreHomeDirBytes(); // optional bytes region_name = 7; /** * <code>optional bytes region_name = 7;</code> * * <pre> * full region name * </pre> */ boolean hasRegionName(); /** * <code>optional bytes region_name = 7;</code> * * <pre> * full region name * </pre> */ com.google.protobuf.ByteString getRegionName(); } /** * Protobuf type {@code hbase.pb.CompactionDescriptor} * * <pre> ** * Special WAL entry to hold all related to a compaction. * Written to WAL before completing compaction. There is * sufficient info in the below message to complete later * the * compaction should we fail the WAL write. * </pre> */ public static final class CompactionDescriptor extends com.google.protobuf.GeneratedMessage implements CompactionDescriptorOrBuilder { // Use CompactionDescriptor.newBuilder() to construct. 
private CompactionDescriptor(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private CompactionDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final CompactionDescriptor defaultInstance; public static CompactionDescriptor getDefaultInstance() { return defaultInstance; } public CompactionDescriptor getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private CompactionDescriptor( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; tableName_ = input.readBytes(); break; } case 18: { bitField0_ |= 0x00000002; encodedRegionName_ = input.readBytes(); break; } case 26: { bitField0_ |= 0x00000004; familyName_ = input.readBytes(); break; } case 34: { if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) { compactionInput_ = new com.google.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000008; } compactionInput_.add(input.readBytes()); break; } case 42: { if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { compactionOutput_ = new com.google.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000010; } compactionOutput_.add(input.readBytes()); break; } case 50: { bitField0_ |= 0x00000008; storeHomeDir_ 
= input.readBytes(); break; } case 58: { bitField0_ |= 0x00000010; regionName_ = input.readBytes(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { compactionInput_ = new com.google.protobuf.UnmodifiableLazyStringList(compactionInput_); } if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) { compactionOutput_ = new com.google.protobuf.UnmodifiableLazyStringList(compactionOutput_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_CompactionDescriptor_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_CompactionDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor.Builder.class); } public static com.google.protobuf.Parser<CompactionDescriptor> PARSER = new com.google.protobuf.AbstractParser<CompactionDescriptor>() { public CompactionDescriptor parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new CompactionDescriptor(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<CompactionDescriptor> getParserForType() { return PARSER; } private int bitField0_; // required bytes table_name = 1; public static final int 
TABLE_NAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString tableName_; /** * <code>required bytes table_name = 1;</code> * * <pre> * TODO: WALKey already stores these, might remove * </pre> */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes table_name = 1;</code> * * <pre> * TODO: WALKey already stores these, might remove * </pre> */ public com.google.protobuf.ByteString getTableName() { return tableName_; } // required bytes encoded_region_name = 2; public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 2; private com.google.protobuf.ByteString encodedRegionName_; /** * <code>required bytes encoded_region_name = 2;</code> */ public boolean hasEncodedRegionName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes encoded_region_name = 2;</code> */ public com.google.protobuf.ByteString getEncodedRegionName() { return encodedRegionName_; } // required bytes family_name = 3; public static final int FAMILY_NAME_FIELD_NUMBER = 3; private com.google.protobuf.ByteString familyName_; /** * <code>required bytes family_name = 3;</code> */ public boolean hasFamilyName() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>required bytes family_name = 3;</code> */ public com.google.protobuf.ByteString getFamilyName() { return familyName_; } // repeated string compaction_input = 4; public static final int COMPACTION_INPUT_FIELD_NUMBER = 4; private com.google.protobuf.LazyStringList compactionInput_; /** * <code>repeated string compaction_input = 4;</code> * * <pre> * relative to store dir * </pre> */ public java.util.List<java.lang.String> getCompactionInputList() { return compactionInput_; } /** * <code>repeated string compaction_input = 4;</code> * * <pre> * relative to store dir * </pre> */ public int getCompactionInputCount() { return compactionInput_.size(); } /** * <code>repeated string compaction_input = 4;</code> * * <pre> * relative to store dir * 
</pre> */ public java.lang.String getCompactionInput(int index) { return compactionInput_.get(index); } /** * <code>repeated string compaction_input = 4;</code> * * <pre> * relative to store dir * </pre> */ public com.google.protobuf.ByteString getCompactionInputBytes(int index) { return compactionInput_.getByteString(index); } // repeated string compaction_output = 5; public static final int COMPACTION_OUTPUT_FIELD_NUMBER = 5; private com.google.protobuf.LazyStringList compactionOutput_; /** * <code>repeated string compaction_output = 5;</code> */ public java.util.List<java.lang.String> getCompactionOutputList() { return compactionOutput_; } /** * <code>repeated string compaction_output = 5;</code> */ public int getCompactionOutputCount() { return compactionOutput_.size(); } /** * <code>repeated string compaction_output = 5;</code> */ public java.lang.String getCompactionOutput(int index) { return compactionOutput_.get(index); } /** * <code>repeated string compaction_output = 5;</code> */ public com.google.protobuf.ByteString getCompactionOutputBytes(int index) { return compactionOutput_.getByteString(index); } // required string store_home_dir = 6; public static final int STORE_HOME_DIR_FIELD_NUMBER = 6; private java.lang.Object storeHomeDir_; /** * <code>required string store_home_dir = 6;</code> * * <pre> * relative to region dir * </pre> */ public boolean hasStoreHomeDir() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>required string store_home_dir = 6;</code> * * <pre> * relative to region dir * </pre> */ public java.lang.String getStoreHomeDir() { java.lang.Object ref = storeHomeDir_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { storeHomeDir_ = s; } return s; } } /** * <code>required string store_home_dir = 6;</code> * * <pre> * relative to region dir * </pre> */ public 
com.google.protobuf.ByteString getStoreHomeDirBytes() { java.lang.Object ref = storeHomeDir_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); storeHomeDir_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // optional bytes region_name = 7; public static final int REGION_NAME_FIELD_NUMBER = 7; private com.google.protobuf.ByteString regionName_; /** * <code>optional bytes region_name = 7;</code> * * <pre> * full region name * </pre> */ public boolean hasRegionName() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional bytes region_name = 7;</code> * * <pre> * full region name * </pre> */ public com.google.protobuf.ByteString getRegionName() { return regionName_; } private void initFields() { tableName_ = com.google.protobuf.ByteString.EMPTY; encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; familyName_ = com.google.protobuf.ByteString.EMPTY; compactionInput_ = com.google.protobuf.LazyStringArrayList.EMPTY; compactionOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY; storeHomeDir_ = ""; regionName_ = com.google.protobuf.ByteString.EMPTY; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasTableName()) { memoizedIsInitialized = 0; return false; } if (!hasEncodedRegionName()) { memoizedIsInitialized = 0; return false; } if (!hasFamilyName()) { memoizedIsInitialized = 0; return false; } if (!hasStoreHomeDir()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, tableName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, encodedRegionName_); } 
if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBytes(3, familyName_); } for (int i = 0; i < compactionInput_.size(); i++) { output.writeBytes(4, compactionInput_.getByteString(i)); } for (int i = 0; i < compactionOutput_.size(); i++) { output.writeBytes(5, compactionOutput_.getByteString(i)); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeBytes(6, getStoreHomeDirBytes()); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBytes(7, regionName_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, tableName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, encodedRegionName_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(3, familyName_); } { int dataSize = 0; for (int i = 0; i < compactionInput_.size(); i++) { dataSize += com.google.protobuf.CodedOutputStream .computeBytesSizeNoTag(compactionInput_.getByteString(i)); } size += dataSize; size += 1 * getCompactionInputList().size(); } { int dataSize = 0; for (int i = 0; i < compactionOutput_.size(); i++) { dataSize += com.google.protobuf.CodedOutputStream .computeBytesSizeNoTag(compactionOutput_.getByteString(i)); } size += dataSize; size += 1 * getCompactionOutputList().size(); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(6, getStoreHomeDirBytes()); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(7, regionName_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; 
@java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor) obj; boolean result = true; result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { result = result && getTableName() .equals(other.getTableName()); } result = result && (hasEncodedRegionName() == other.hasEncodedRegionName()); if (hasEncodedRegionName()) { result = result && getEncodedRegionName() .equals(other.getEncodedRegionName()); } result = result && (hasFamilyName() == other.hasFamilyName()); if (hasFamilyName()) { result = result && getFamilyName() .equals(other.getFamilyName()); } result = result && getCompactionInputList() .equals(other.getCompactionInputList()); result = result && getCompactionOutputList() .equals(other.getCompactionOutputList()); result = result && (hasStoreHomeDir() == other.hasStoreHomeDir()); if (hasStoreHomeDir()) { result = result && getStoreHomeDir() .equals(other.getStoreHomeDir()); } result = result && (hasRegionName() == other.hasRegionName()); if (hasRegionName()) { result = result && getRegionName() .equals(other.getRegionName()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableName()) { hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } if (hasEncodedRegionName()) { hash = (37 * hash) 
+ ENCODED_REGION_NAME_FIELD_NUMBER; hash = (53 * hash) + getEncodedRegionName().hashCode(); } if (hasFamilyName()) { hash = (37 * hash) + FAMILY_NAME_FIELD_NUMBER; hash = (53 * hash) + getFamilyName().hashCode(); } if (getCompactionInputCount() > 0) { hash = (37 * hash) + COMPACTION_INPUT_FIELD_NUMBER; hash = (53 * hash) + getCompactionInputList().hashCode(); } if (getCompactionOutputCount() > 0) { hash = (37 * hash) + COMPACTION_OUTPUT_FIELD_NUMBER; hash = (53 * hash) + getCompactionOutputList().hashCode(); } if (hasStoreHomeDir()) { hash = (37 * hash) + STORE_HOME_DIR_FIELD_NUMBER; hash = (53 * hash) + getStoreHomeDir().hashCode(); } if (hasRegionName()) { hash = (37 * hash) + REGION_NAME_FIELD_NUMBER; hash = (53 * hash) + getRegionName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor 
parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.CompactionDescriptor} * * <pre> ** * Special WAL entry to hold all related to a 
compaction. * Written to WAL before completing compaction. There is * sufficient info in the below message to complete later * the * compaction should we fail the WAL write. * </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptorOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_CompactionDescriptor_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_CompactionDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); tableName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); familyName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); compactionInput_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000008); compactionOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = 
(bitField0_ & ~0x00000010); storeHomeDir_ = ""; bitField0_ = (bitField0_ & ~0x00000020); regionName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000040); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_CompactionDescriptor_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor build() { org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor buildPartial() { org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.tableName_ = tableName_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.encodedRegionName_ = encodedRegionName_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.familyName_ = familyName_; if (((bitField0_ & 0x00000008) == 0x00000008)) { compactionInput_ = new com.google.protobuf.UnmodifiableLazyStringList( compactionInput_); bitField0_ = (bitField0_ & ~0x00000008); } result.compactionInput_ = compactionInput_; if (((bitField0_ & 0x00000010) == 0x00000010)) { compactionOutput_ = new 
com.google.protobuf.UnmodifiableLazyStringList( compactionOutput_); bitField0_ = (bitField0_ & ~0x00000010); } result.compactionOutput_ = compactionOutput_; if (((from_bitField0_ & 0x00000020) == 0x00000020)) { to_bitField0_ |= 0x00000008; } result.storeHomeDir_ = storeHomeDir_; if (((from_bitField0_ & 0x00000040) == 0x00000040)) { to_bitField0_ |= 0x00000010; } result.regionName_ = regionName_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor other) { if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor.getDefaultInstance()) return this; if (other.hasTableName()) { setTableName(other.getTableName()); } if (other.hasEncodedRegionName()) { setEncodedRegionName(other.getEncodedRegionName()); } if (other.hasFamilyName()) { setFamilyName(other.getFamilyName()); } if (!other.compactionInput_.isEmpty()) { if (compactionInput_.isEmpty()) { compactionInput_ = other.compactionInput_; bitField0_ = (bitField0_ & ~0x00000008); } else { ensureCompactionInputIsMutable(); compactionInput_.addAll(other.compactionInput_); } onChanged(); } if (!other.compactionOutput_.isEmpty()) { if (compactionOutput_.isEmpty()) { compactionOutput_ = other.compactionOutput_; bitField0_ = (bitField0_ & ~0x00000010); } else { ensureCompactionOutputIsMutable(); compactionOutput_.addAll(other.compactionOutput_); } onChanged(); } if (other.hasStoreHomeDir()) { bitField0_ |= 0x00000020; storeHomeDir_ = other.storeHomeDir_; onChanged(); } if (other.hasRegionName()) { setRegionName(other.getRegionName()); } this.mergeUnknownFields(other.getUnknownFields()); 
return this; } public final boolean isInitialized() { if (!hasTableName()) { return false; } if (!hasEncodedRegionName()) { return false; } if (!hasFamilyName()) { return false; } if (!hasStoreHomeDir()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required bytes table_name = 1; private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes table_name = 1;</code> * * <pre> * TODO: WALKey already stores these, might remove * </pre> */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes table_name = 1;</code> * * <pre> * TODO: WALKey already stores these, might remove * </pre> */ public com.google.protobuf.ByteString getTableName() { return tableName_; } /** * <code>required bytes table_name = 1;</code> * * <pre> * TODO: WALKey already stores these, might remove * </pre> */ public Builder setTableName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; tableName_ = value; onChanged(); return this; } /** * <code>required bytes table_name = 1;</code> * * <pre> * TODO: WALKey already stores these, might remove * </pre> */ public Builder clearTableName() { bitField0_ = (bitField0_ & ~0x00000001); tableName_ = getDefaultInstance().getTableName(); onChanged(); return this; } // 
required bytes encoded_region_name = 2; private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes encoded_region_name = 2;</code> */ public boolean hasEncodedRegionName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes encoded_region_name = 2;</code> */ public com.google.protobuf.ByteString getEncodedRegionName() { return encodedRegionName_; } /** * <code>required bytes encoded_region_name = 2;</code> */ public Builder setEncodedRegionName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; encodedRegionName_ = value; onChanged(); return this; } /** * <code>required bytes encoded_region_name = 2;</code> */ public Builder clearEncodedRegionName() { bitField0_ = (bitField0_ & ~0x00000002); encodedRegionName_ = getDefaultInstance().getEncodedRegionName(); onChanged(); return this; } // required bytes family_name = 3; private com.google.protobuf.ByteString familyName_ = com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes family_name = 3;</code> */ public boolean hasFamilyName() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>required bytes family_name = 3;</code> */ public com.google.protobuf.ByteString getFamilyName() { return familyName_; } /** * <code>required bytes family_name = 3;</code> */ public Builder setFamilyName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; familyName_ = value; onChanged(); return this; } /** * <code>required bytes family_name = 3;</code> */ public Builder clearFamilyName() { bitField0_ = (bitField0_ & ~0x00000004); familyName_ = getDefaultInstance().getFamilyName(); onChanged(); return this; } // repeated string compaction_input = 4; private com.google.protobuf.LazyStringList compactionInput_ = com.google.protobuf.LazyStringArrayList.EMPTY; private void 
ensureCompactionInputIsMutable() { if (!((bitField0_ & 0x00000008) == 0x00000008)) { compactionInput_ = new com.google.protobuf.LazyStringArrayList(compactionInput_); bitField0_ |= 0x00000008; } } /** * <code>repeated string compaction_input = 4;</code> * * <pre> * relative to store dir * </pre> */ public java.util.List<java.lang.String> getCompactionInputList() { return java.util.Collections.unmodifiableList(compactionInput_); } /** * <code>repeated string compaction_input = 4;</code> * * <pre> * relative to store dir * </pre> */ public int getCompactionInputCount() { return compactionInput_.size(); } /** * <code>repeated string compaction_input = 4;</code> * * <pre> * relative to store dir * </pre> */ public java.lang.String getCompactionInput(int index) { return compactionInput_.get(index); } /** * <code>repeated string compaction_input = 4;</code> * * <pre> * relative to store dir * </pre> */ public com.google.protobuf.ByteString getCompactionInputBytes(int index) { return compactionInput_.getByteString(index); } /** * <code>repeated string compaction_input = 4;</code> * * <pre> * relative to store dir * </pre> */ public Builder setCompactionInput( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureCompactionInputIsMutable(); compactionInput_.set(index, value); onChanged(); return this; } /** * <code>repeated string compaction_input = 4;</code> * * <pre> * relative to store dir * </pre> */ public Builder addCompactionInput( java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureCompactionInputIsMutable(); compactionInput_.add(value); onChanged(); return this; } /** * <code>repeated string compaction_input = 4;</code> * * <pre> * relative to store dir * </pre> */ public Builder addAllCompactionInput( java.lang.Iterable<java.lang.String> values) { ensureCompactionInputIsMutable(); super.addAll(values, compactionInput_); onChanged(); return this; } /** * <code>repeated string 
compaction_input = 4;</code> * * <pre> * relative to store dir * </pre> */ public Builder clearCompactionInput() { compactionInput_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * <code>repeated string compaction_input = 4;</code> * * <pre> * relative to store dir * </pre> */ public Builder addCompactionInputBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureCompactionInputIsMutable(); compactionInput_.add(value); onChanged(); return this; } // repeated string compaction_output = 5; private com.google.protobuf.LazyStringList compactionOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY; private void ensureCompactionOutputIsMutable() { if (!((bitField0_ & 0x00000010) == 0x00000010)) { compactionOutput_ = new com.google.protobuf.LazyStringArrayList(compactionOutput_); bitField0_ |= 0x00000010; } } /** * <code>repeated string compaction_output = 5;</code> */ public java.util.List<java.lang.String> getCompactionOutputList() { return java.util.Collections.unmodifiableList(compactionOutput_); } /** * <code>repeated string compaction_output = 5;</code> */ public int getCompactionOutputCount() { return compactionOutput_.size(); } /** * <code>repeated string compaction_output = 5;</code> */ public java.lang.String getCompactionOutput(int index) { return compactionOutput_.get(index); } /** * <code>repeated string compaction_output = 5;</code> */ public com.google.protobuf.ByteString getCompactionOutputBytes(int index) { return compactionOutput_.getByteString(index); } /** * <code>repeated string compaction_output = 5;</code> */ public Builder setCompactionOutput( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureCompactionOutputIsMutable(); compactionOutput_.set(index, value); onChanged(); return this; } /** * <code>repeated string compaction_output = 5;</code> */ public Builder 
addCompactionOutput( java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureCompactionOutputIsMutable(); compactionOutput_.add(value); onChanged(); return this; } /** * <code>repeated string compaction_output = 5;</code> */ public Builder addAllCompactionOutput( java.lang.Iterable<java.lang.String> values) { ensureCompactionOutputIsMutable(); super.addAll(values, compactionOutput_); onChanged(); return this; } /** * <code>repeated string compaction_output = 5;</code> */ public Builder clearCompactionOutput() { compactionOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000010); onChanged(); return this; } /** * <code>repeated string compaction_output = 5;</code> */ public Builder addCompactionOutputBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureCompactionOutputIsMutable(); compactionOutput_.add(value); onChanged(); return this; } // required string store_home_dir = 6; private java.lang.Object storeHomeDir_ = ""; /** * <code>required string store_home_dir = 6;</code> * * <pre> * relative to region dir * </pre> */ public boolean hasStoreHomeDir() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>required string store_home_dir = 6;</code> * * <pre> * relative to region dir * </pre> */ public java.lang.String getStoreHomeDir() { java.lang.Object ref = storeHomeDir_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); storeHomeDir_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>required string store_home_dir = 6;</code> * * <pre> * relative to region dir * </pre> */ public com.google.protobuf.ByteString getStoreHomeDirBytes() { java.lang.Object ref = storeHomeDir_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); storeHomeDir_ = b; return b; } else 
{ return (com.google.protobuf.ByteString) ref; } } /** * <code>required string store_home_dir = 6;</code> * * <pre> * relative to region dir * </pre> */ public Builder setStoreHomeDir( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000020; storeHomeDir_ = value; onChanged(); return this; } /** * <code>required string store_home_dir = 6;</code> * * <pre> * relative to region dir * </pre> */ public Builder clearStoreHomeDir() { bitField0_ = (bitField0_ & ~0x00000020); storeHomeDir_ = getDefaultInstance().getStoreHomeDir(); onChanged(); return this; } /** * <code>required string store_home_dir = 6;</code> * * <pre> * relative to region dir * </pre> */ public Builder setStoreHomeDirBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000020; storeHomeDir_ = value; onChanged(); return this; } // optional bytes region_name = 7; private com.google.protobuf.ByteString regionName_ = com.google.protobuf.ByteString.EMPTY; /** * <code>optional bytes region_name = 7;</code> * * <pre> * full region name * </pre> */ public boolean hasRegionName() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <code>optional bytes region_name = 7;</code> * * <pre> * full region name * </pre> */ public com.google.protobuf.ByteString getRegionName() { return regionName_; } /** * <code>optional bytes region_name = 7;</code> * * <pre> * full region name * </pre> */ public Builder setRegionName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000040; regionName_ = value; onChanged(); return this; } /** * <code>optional bytes region_name = 7;</code> * * <pre> * full region name * </pre> */ public Builder clearRegionName() { bitField0_ = (bitField0_ & ~0x00000040); regionName_ = getDefaultInstance().getRegionName(); onChanged(); return this; } // 
@@protoc_insertion_point(builder_scope:hbase.pb.CompactionDescriptor) } static { defaultInstance = new CompactionDescriptor(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.CompactionDescriptor) } public interface FlushDescriptorOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .hbase.pb.FlushDescriptor.FlushAction action = 1; /** * <code>required .hbase.pb.FlushDescriptor.FlushAction action = 1;</code> */ boolean hasAction(); /** * <code>required .hbase.pb.FlushDescriptor.FlushAction action = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction getAction(); // required bytes table_name = 2; /** * <code>required bytes table_name = 2;</code> */ boolean hasTableName(); /** * <code>required bytes table_name = 2;</code> */ com.google.protobuf.ByteString getTableName(); // required bytes encoded_region_name = 3; /** * <code>required bytes encoded_region_name = 3;</code> */ boolean hasEncodedRegionName(); /** * <code>required bytes encoded_region_name = 3;</code> */ com.google.protobuf.ByteString getEncodedRegionName(); // optional uint64 flush_sequence_number = 4; /** * <code>optional uint64 flush_sequence_number = 4;</code> */ boolean hasFlushSequenceNumber(); /** * <code>optional uint64 flush_sequence_number = 4;</code> */ long getFlushSequenceNumber(); // repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5; /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor> getStoreFlushesList(); /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor getStoreFlushes(int index); /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ int 
getStoreFlushesCount(); /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder> getStoreFlushesOrBuilderList(); /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder getStoreFlushesOrBuilder( int index); // optional bytes region_name = 6; /** * <code>optional bytes region_name = 6;</code> * * <pre> * full region name * </pre> */ boolean hasRegionName(); /** * <code>optional bytes region_name = 6;</code> * * <pre> * full region name * </pre> */ com.google.protobuf.ByteString getRegionName(); } /** * Protobuf type {@code hbase.pb.FlushDescriptor} * * <pre> ** * Special WAL entry to hold all related to a flush. * </pre> */ public static final class FlushDescriptor extends com.google.protobuf.GeneratedMessage implements FlushDescriptorOrBuilder { // Use FlushDescriptor.newBuilder() to construct. 
private FlushDescriptor(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private FlushDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final FlushDescriptor defaultInstance; public static FlushDescriptor getDefaultInstance() { return defaultInstance; } public FlushDescriptor getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private FlushDescriptor( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { int rawValue = input.readEnum(); org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction value = org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(1, rawValue); } else { bitField0_ |= 0x00000001; action_ = value; } break; } case 18: { bitField0_ |= 0x00000002; tableName_ = input.readBytes(); break; } case 26: { bitField0_ |= 0x00000004; encodedRegionName_ = input.readBytes(); break; } case 32: { bitField0_ |= 0x00000008; flushSequenceNumber_ = input.readUInt64(); break; } case 42: { if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { storeFlushes_ = new 
java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor>(); mutable_bitField0_ |= 0x00000010; } storeFlushes_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.PARSER, extensionRegistry)); break; } case 50: { bitField0_ |= 0x00000010; regionName_ = input.readBytes(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) { storeFlushes_ = java.util.Collections.unmodifiableList(storeFlushes_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.Builder.class); } public static com.google.protobuf.Parser<FlushDescriptor> PARSER = new com.google.protobuf.AbstractParser<FlushDescriptor>() { public FlushDescriptor parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new FlushDescriptor(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<FlushDescriptor> getParserForType() { return PARSER; } /** * Protobuf enum {@code 
hbase.pb.FlushDescriptor.FlushAction} */ public enum FlushAction implements com.google.protobuf.ProtocolMessageEnum { /** * <code>START_FLUSH = 0;</code> */ START_FLUSH(0, 0), /** * <code>COMMIT_FLUSH = 1;</code> */ COMMIT_FLUSH(1, 1), /** * <code>ABORT_FLUSH = 2;</code> */ ABORT_FLUSH(2, 2), /** * <code>CANNOT_FLUSH = 3;</code> * * <pre> * marker for indicating that a flush has been requested but cannot complete * </pre> */ CANNOT_FLUSH(3, 3), ; /** * <code>START_FLUSH = 0;</code> */ public static final int START_FLUSH_VALUE = 0; /** * <code>COMMIT_FLUSH = 1;</code> */ public static final int COMMIT_FLUSH_VALUE = 1; /** * <code>ABORT_FLUSH = 2;</code> */ public static final int ABORT_FLUSH_VALUE = 2; /** * <code>CANNOT_FLUSH = 3;</code> * * <pre> * marker for indicating that a flush has been requested but cannot complete * </pre> */ public static final int CANNOT_FLUSH_VALUE = 3; public final int getNumber() { return value; } public static FlushAction valueOf(int value) { switch (value) { case 0: return START_FLUSH; case 1: return COMMIT_FLUSH; case 2: return ABORT_FLUSH; case 3: return CANNOT_FLUSH; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<FlushAction> internalGetValueMap() { return internalValueMap; } private static com.google.protobuf.Internal.EnumLiteMap<FlushAction> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<FlushAction>() { public FlushAction findValueByNumber(int number) { return FlushAction.valueOf(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(index); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.getDescriptor().getEnumTypes().get(0); } private static final FlushAction[] VALUES = 
values(); public static FlushAction valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int index; private final int value; private FlushAction(int index, int value) { this.index = index; this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.FlushDescriptor.FlushAction) } public interface StoreFlushDescriptorOrBuilder extends com.google.protobuf.MessageOrBuilder { // required bytes family_name = 1; /** * <code>required bytes family_name = 1;</code> */ boolean hasFamilyName(); /** * <code>required bytes family_name = 1;</code> */ com.google.protobuf.ByteString getFamilyName(); // required string store_home_dir = 2; /** * <code>required string store_home_dir = 2;</code> * * <pre> *relative to region dir * </pre> */ boolean hasStoreHomeDir(); /** * <code>required string store_home_dir = 2;</code> * * <pre> *relative to region dir * </pre> */ java.lang.String getStoreHomeDir(); /** * <code>required string store_home_dir = 2;</code> * * <pre> *relative to region dir * </pre> */ com.google.protobuf.ByteString getStoreHomeDirBytes(); // repeated string flush_output = 3; /** * <code>repeated string flush_output = 3;</code> * * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> */ java.util.List<java.lang.String> getFlushOutputList(); /** * <code>repeated string flush_output = 3;</code> * * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> */ int getFlushOutputCount(); /** * <code>repeated string flush_output = 3;</code> * * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> */ java.lang.String getFlushOutput(int index); /** * <code>repeated string flush_output = 3;</code> * * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> */ com.google.protobuf.ByteString getFlushOutputBytes(int index); } /** 
* Protobuf type {@code hbase.pb.FlushDescriptor.StoreFlushDescriptor} */ public static final class StoreFlushDescriptor extends com.google.protobuf.GeneratedMessage implements StoreFlushDescriptorOrBuilder { // Use StoreFlushDescriptor.newBuilder() to construct. private StoreFlushDescriptor(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private StoreFlushDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final StoreFlushDescriptor defaultInstance; public static StoreFlushDescriptor getDefaultInstance() { return defaultInstance; } public StoreFlushDescriptor getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private StoreFlushDescriptor( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; familyName_ = input.readBytes(); break; } case 18: { bitField0_ |= 0x00000002; storeHomeDir_ = input.readBytes(); break; } case 26: { if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { flushOutput_ = new com.google.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000004; } flushOutput_.add(input.readBytes()); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw 
e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { flushOutput_ = new com.google.protobuf.UnmodifiableLazyStringList(flushOutput_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder.class); } public static com.google.protobuf.Parser<StoreFlushDescriptor> PARSER = new com.google.protobuf.AbstractParser<StoreFlushDescriptor>() { public StoreFlushDescriptor parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new StoreFlushDescriptor(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<StoreFlushDescriptor> getParserForType() { return PARSER; } private int bitField0_; // required bytes family_name = 1; public static final int FAMILY_NAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString familyName_; /** * <code>required bytes family_name = 1;</code> */ public boolean hasFamilyName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes family_name = 1;</code> */ public 
com.google.protobuf.ByteString getFamilyName() { return familyName_; } // required string store_home_dir = 2; public static final int STORE_HOME_DIR_FIELD_NUMBER = 2; private java.lang.Object storeHomeDir_; /** * <code>required string store_home_dir = 2;</code> * * <pre> *relative to region dir * </pre> */ public boolean hasStoreHomeDir() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required string store_home_dir = 2;</code> * * <pre> *relative to region dir * </pre> */ public java.lang.String getStoreHomeDir() { java.lang.Object ref = storeHomeDir_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { storeHomeDir_ = s; } return s; } } /** * <code>required string store_home_dir = 2;</code> * * <pre> *relative to region dir * </pre> */ public com.google.protobuf.ByteString getStoreHomeDirBytes() { java.lang.Object ref = storeHomeDir_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); storeHomeDir_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // repeated string flush_output = 3; public static final int FLUSH_OUTPUT_FIELD_NUMBER = 3; private com.google.protobuf.LazyStringList flushOutput_; /** * <code>repeated string flush_output = 3;</code> * * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> */ public java.util.List<java.lang.String> getFlushOutputList() { return flushOutput_; } /** * <code>repeated string flush_output = 3;</code> * * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> */ public int getFlushOutputCount() { return flushOutput_.size(); } /** * <code>repeated string flush_output = 3;</code> * * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> */ public java.lang.String getFlushOutput(int index) { 
return flushOutput_.get(index); } /** * <code>repeated string flush_output = 3;</code> * * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> */ public com.google.protobuf.ByteString getFlushOutputBytes(int index) { return flushOutput_.getByteString(index); } private void initFields() { familyName_ = com.google.protobuf.ByteString.EMPTY; storeHomeDir_ = ""; flushOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasFamilyName()) { memoizedIsInitialized = 0; return false; } if (!hasStoreHomeDir()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, familyName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, getStoreHomeDirBytes()); } for (int i = 0; i < flushOutput_.size(); i++) { output.writeBytes(3, flushOutput_.getByteString(i)); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, familyName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, getStoreHomeDirBytes()); } { int dataSize = 0; for (int i = 0; i < flushOutput_.size(); i++) { dataSize += com.google.protobuf.CodedOutputStream .computeBytesSizeNoTag(flushOutput_.getByteString(i)); } size += dataSize; size += 1 * getFlushOutputList().size(); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private 
static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor) obj; boolean result = true; result = result && (hasFamilyName() == other.hasFamilyName()); if (hasFamilyName()) { result = result && getFamilyName() .equals(other.getFamilyName()); } result = result && (hasStoreHomeDir() == other.hasStoreHomeDir()); if (hasStoreHomeDir()) { result = result && getStoreHomeDir() .equals(other.getStoreHomeDir()); } result = result && getFlushOutputList() .equals(other.getFlushOutputList()); result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasFamilyName()) { hash = (37 * hash) + FAMILY_NAME_FIELD_NUMBER; hash = (53 * hash) + getFamilyName().hashCode(); } if (hasStoreHomeDir()) { hash = (37 * hash) + STORE_HOME_DIR_FIELD_NUMBER; hash = (53 * hash) + getStoreHomeDir().hashCode(); } if (getFlushOutputCount() > 0) { hash = (37 * hash) + FLUSH_OUTPUT_FIELD_NUMBER; hash = (53 * hash) + getFlushOutputList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom( com.google.protobuf.ByteString data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.FlushDescriptor.StoreFlushDescriptor} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.class, 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); familyName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); storeHomeDir_ = ""; bitField0_ = (bitField0_ & ~0x00000002); flushOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor build() { org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor buildPartial() { org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor result = new 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.familyName_ = familyName_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.storeHomeDir_ = storeHomeDir_; if (((bitField0_ & 0x00000004) == 0x00000004)) { flushOutput_ = new com.google.protobuf.UnmodifiableLazyStringList( flushOutput_); bitField0_ = (bitField0_ & ~0x00000004); } result.flushOutput_ = flushOutput_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor other) { if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.getDefaultInstance()) return this; if (other.hasFamilyName()) { setFamilyName(other.getFamilyName()); } if (other.hasStoreHomeDir()) { bitField0_ |= 0x00000002; storeHomeDir_ = other.storeHomeDir_; onChanged(); } if (!other.flushOutput_.isEmpty()) { if (flushOutput_.isEmpty()) { flushOutput_ = other.flushOutput_; bitField0_ = (bitField0_ & ~0x00000004); } else { ensureFlushOutputIsMutable(); flushOutput_.addAll(other.flushOutput_); } onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasFamilyName()) { return false; } if (!hasStoreHomeDir()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required bytes family_name = 1; private com.google.protobuf.ByteString familyName_ = com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes family_name = 1;</code> */ public boolean hasFamilyName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes family_name = 1;</code> */ public com.google.protobuf.ByteString getFamilyName() { return familyName_; } /** * <code>required bytes family_name = 1;</code> */ public Builder setFamilyName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; familyName_ = value; onChanged(); return this; } /** * <code>required bytes family_name = 1;</code> */ public Builder clearFamilyName() { bitField0_ = (bitField0_ & ~0x00000001); familyName_ = getDefaultInstance().getFamilyName(); onChanged(); return this; } // required string store_home_dir = 2; private java.lang.Object storeHomeDir_ = ""; /** * <code>required string store_home_dir = 2;</code> * * <pre> *relative to region dir * </pre> */ public boolean hasStoreHomeDir() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required string store_home_dir = 2;</code> * * <pre> *relative to region dir * </pre> */ public java.lang.String getStoreHomeDir() { java.lang.Object ref = storeHomeDir_; if (!(ref instanceof java.lang.String)) { java.lang.String s = 
((com.google.protobuf.ByteString) ref) .toStringUtf8(); storeHomeDir_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>required string store_home_dir = 2;</code> * * <pre> *relative to region dir * </pre> */ public com.google.protobuf.ByteString getStoreHomeDirBytes() { java.lang.Object ref = storeHomeDir_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); storeHomeDir_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>required string store_home_dir = 2;</code> * * <pre> *relative to region dir * </pre> */ public Builder setStoreHomeDir( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; storeHomeDir_ = value; onChanged(); return this; } /** * <code>required string store_home_dir = 2;</code> * * <pre> *relative to region dir * </pre> */ public Builder clearStoreHomeDir() { bitField0_ = (bitField0_ & ~0x00000002); storeHomeDir_ = getDefaultInstance().getStoreHomeDir(); onChanged(); return this; } /** * <code>required string store_home_dir = 2;</code> * * <pre> *relative to region dir * </pre> */ public Builder setStoreHomeDirBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; storeHomeDir_ = value; onChanged(); return this; } // repeated string flush_output = 3; private com.google.protobuf.LazyStringList flushOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY; private void ensureFlushOutputIsMutable() { if (!((bitField0_ & 0x00000004) == 0x00000004)) { flushOutput_ = new com.google.protobuf.LazyStringArrayList(flushOutput_); bitField0_ |= 0x00000004; } } /** * <code>repeated string flush_output = 3;</code> * * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> */ public java.util.List<java.lang.String> getFlushOutputList() { return 
java.util.Collections.unmodifiableList(flushOutput_); } /** * <code>repeated string flush_output = 3;</code> * * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> */ public int getFlushOutputCount() { return flushOutput_.size(); } /** * <code>repeated string flush_output = 3;</code> * * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> */ public java.lang.String getFlushOutput(int index) { return flushOutput_.get(index); } /** * <code>repeated string flush_output = 3;</code> * * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> */ public com.google.protobuf.ByteString getFlushOutputBytes(int index) { return flushOutput_.getByteString(index); } /** * <code>repeated string flush_output = 3;</code> * * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> */ public Builder setFlushOutput( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureFlushOutputIsMutable(); flushOutput_.set(index, value); onChanged(); return this; } /** * <code>repeated string flush_output = 3;</code> * * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> */ public Builder addFlushOutput( java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureFlushOutputIsMutable(); flushOutput_.add(value); onChanged(); return this; } /** * <code>repeated string flush_output = 3;</code> * * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> */ public Builder addAllFlushOutput( java.lang.Iterable<java.lang.String> values) { ensureFlushOutputIsMutable(); super.addAll(values, flushOutput_); onChanged(); return this; } /** * <code>repeated string flush_output = 3;</code> * * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> */ public Builder clearFlushOutput() { flushOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * <code>repeated string 
flush_output = 3;</code> * * <pre> * relative to store dir (if this is a COMMIT_FLUSH) * </pre> */ public Builder addFlushOutputBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureFlushOutputIsMutable(); flushOutput_.add(value); onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.FlushDescriptor.StoreFlushDescriptor) } static { defaultInstance = new StoreFlushDescriptor(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.FlushDescriptor.StoreFlushDescriptor) } private int bitField0_; // required .hbase.pb.FlushDescriptor.FlushAction action = 1; public static final int ACTION_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction action_; /** * <code>required .hbase.pb.FlushDescriptor.FlushAction action = 1;</code> */ public boolean hasAction() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.FlushDescriptor.FlushAction action = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction getAction() { return action_; } // required bytes table_name = 2; public static final int TABLE_NAME_FIELD_NUMBER = 2; private com.google.protobuf.ByteString tableName_; /** * <code>required bytes table_name = 2;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes table_name = 2;</code> */ public com.google.protobuf.ByteString getTableName() { return tableName_; } // required bytes encoded_region_name = 3; public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 3; private com.google.protobuf.ByteString encodedRegionName_; /** * <code>required bytes encoded_region_name = 3;</code> */ public boolean hasEncodedRegionName() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>required bytes encoded_region_name = 3;</code> */ public 
com.google.protobuf.ByteString getEncodedRegionName() { return encodedRegionName_; } // optional uint64 flush_sequence_number = 4; public static final int FLUSH_SEQUENCE_NUMBER_FIELD_NUMBER = 4; private long flushSequenceNumber_; /** * <code>optional uint64 flush_sequence_number = 4;</code> */ public boolean hasFlushSequenceNumber() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional uint64 flush_sequence_number = 4;</code> */ public long getFlushSequenceNumber() { return flushSequenceNumber_; } // repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5; public static final int STORE_FLUSHES_FIELD_NUMBER = 5; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor> storeFlushes_; /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor> getStoreFlushesList() { return storeFlushes_; } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder> getStoreFlushesOrBuilderList() { return storeFlushes_; } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public int getStoreFlushesCount() { return storeFlushes_.size(); } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor getStoreFlushes(int index) { return storeFlushes_.get(index); } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder getStoreFlushesOrBuilder( int index) { return storeFlushes_.get(index); } // optional bytes region_name = 6; public static final int REGION_NAME_FIELD_NUMBER = 6; private com.google.protobuf.ByteString regionName_; /** * <code>optional bytes region_name = 6;</code> * * <pre> * full region name * </pre> */ public boolean hasRegionName() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional bytes region_name = 6;</code> * * <pre> * full region name * </pre> */ public com.google.protobuf.ByteString getRegionName() { return regionName_; } private void initFields() { action_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction.START_FLUSH; tableName_ = com.google.protobuf.ByteString.EMPTY; encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; flushSequenceNumber_ = 0L; storeFlushes_ = java.util.Collections.emptyList(); regionName_ = com.google.protobuf.ByteString.EMPTY; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasAction()) { memoizedIsInitialized = 0; return false; } if (!hasTableName()) { 
memoizedIsInitialized = 0; return false; } if (!hasEncodedRegionName()) { memoizedIsInitialized = 0; return false; } for (int i = 0; i < getStoreFlushesCount(); i++) { if (!getStoreFlushes(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeEnum(1, action_.getNumber()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, tableName_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBytes(3, encodedRegionName_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeUInt64(4, flushSequenceNumber_); } for (int i = 0; i < storeFlushes_.size(); i++) { output.writeMessage(5, storeFlushes_.get(i)); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBytes(6, regionName_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(1, action_.getNumber()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, tableName_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(3, encodedRegionName_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(4, flushSequenceNumber_); } for (int i = 0; i < storeFlushes_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(5, storeFlushes_.get(i)); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(6, regionName_); } size += 
getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor) obj; boolean result = true; result = result && (hasAction() == other.hasAction()); if (hasAction()) { result = result && (getAction() == other.getAction()); } result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { result = result && getTableName() .equals(other.getTableName()); } result = result && (hasEncodedRegionName() == other.hasEncodedRegionName()); if (hasEncodedRegionName()) { result = result && getEncodedRegionName() .equals(other.getEncodedRegionName()); } result = result && (hasFlushSequenceNumber() == other.hasFlushSequenceNumber()); if (hasFlushSequenceNumber()) { result = result && (getFlushSequenceNumber() == other.getFlushSequenceNumber()); } result = result && getStoreFlushesList() .equals(other.getStoreFlushesList()); result = result && (hasRegionName() == other.hasRegionName()); if (hasRegionName()) { result = result && getRegionName() .equals(other.getRegionName()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasAction()) { hash = (37 * hash) + ACTION_FIELD_NUMBER; hash = (53 * hash) + hashEnum(getAction()); } if (hasTableName()) { hash 
= (37 * hash) + TABLE_NAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } if (hasEncodedRegionName()) { hash = (37 * hash) + ENCODED_REGION_NAME_FIELD_NUMBER; hash = (53 * hash) + getEncodedRegionName().hashCode(); } if (hasFlushSequenceNumber()) { hash = (37 * hash) + FLUSH_SEQUENCE_NUMBER_FIELD_NUMBER; hash = (53 * hash) + hashLong(getFlushSequenceNumber()); } if (getStoreFlushesCount() > 0) { hash = (37 * hash) + STORE_FLUSHES_FIELD_NUMBER; hash = (53 * hash) + getStoreFlushesList().hashCode(); } if (hasRegionName()) { hash = (37 * hash) + REGION_NAME_FIELD_NUMBER; hash = (53 * hash) + getRegionName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.FlushDescriptor} * * <pre> ** * Special WAL entry to hold all related to a flush. 
* </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptorOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getStoreFlushesFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); action_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction.START_FLUSH; bitField0_ = (bitField0_ & ~0x00000001); tableName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); flushSequenceNumber_ = 0L; bitField0_ = (bitField0_ & ~0x00000008); if (storeFlushesBuilder_ == null) { storeFlushes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000010); } else { storeFlushesBuilder_.clear(); } regionName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ 
& ~0x00000020); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_FlushDescriptor_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor build() { org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor buildPartial() { org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.action_ = action_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.tableName_ = tableName_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.encodedRegionName_ = encodedRegionName_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.flushSequenceNumber_ = flushSequenceNumber_; if (storeFlushesBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010)) { storeFlushes_ = java.util.Collections.unmodifiableList(storeFlushes_); bitField0_ = (bitField0_ & ~0x00000010); } result.storeFlushes_ = storeFlushes_; } else { result.storeFlushes_ = storeFlushesBuilder_.build(); } if (((from_bitField0_ & 0x00000020) == 0x00000020)) { to_bitField0_ |= 0x00000010; } result.regionName_ = 
regionName_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor other) { if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.getDefaultInstance()) return this; if (other.hasAction()) { setAction(other.getAction()); } if (other.hasTableName()) { setTableName(other.getTableName()); } if (other.hasEncodedRegionName()) { setEncodedRegionName(other.getEncodedRegionName()); } if (other.hasFlushSequenceNumber()) { setFlushSequenceNumber(other.getFlushSequenceNumber()); } if (storeFlushesBuilder_ == null) { if (!other.storeFlushes_.isEmpty()) { if (storeFlushes_.isEmpty()) { storeFlushes_ = other.storeFlushes_; bitField0_ = (bitField0_ & ~0x00000010); } else { ensureStoreFlushesIsMutable(); storeFlushes_.addAll(other.storeFlushes_); } onChanged(); } } else { if (!other.storeFlushes_.isEmpty()) { if (storeFlushesBuilder_.isEmpty()) { storeFlushesBuilder_.dispose(); storeFlushesBuilder_ = null; storeFlushes_ = other.storeFlushes_; bitField0_ = (bitField0_ & ~0x00000010); storeFlushesBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
getStoreFlushesFieldBuilder() : null; } else { storeFlushesBuilder_.addAllMessages(other.storeFlushes_); } } } if (other.hasRegionName()) { setRegionName(other.getRegionName()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasAction()) { return false; } if (!hasTableName()) { return false; } if (!hasEncodedRegionName()) { return false; } for (int i = 0; i < getStoreFlushesCount(); i++) { if (!getStoreFlushes(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .hbase.pb.FlushDescriptor.FlushAction action = 1; private org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction action_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction.START_FLUSH; /** * <code>required .hbase.pb.FlushDescriptor.FlushAction action = 1;</code> */ public boolean hasAction() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.FlushDescriptor.FlushAction action = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction getAction() { return action_; } /** * <code>required .hbase.pb.FlushDescriptor.FlushAction action = 1;</code> */ public Builder setAction(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction value) { if (value == null) { throw new 
NullPointerException(); } bitField0_ |= 0x00000001; action_ = value; onChanged(); return this; } /** * <code>required .hbase.pb.FlushDescriptor.FlushAction action = 1;</code> */ public Builder clearAction() { bitField0_ = (bitField0_ & ~0x00000001); action_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction.START_FLUSH; onChanged(); return this; } // required bytes table_name = 2; private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes table_name = 2;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes table_name = 2;</code> */ public com.google.protobuf.ByteString getTableName() { return tableName_; } /** * <code>required bytes table_name = 2;</code> */ public Builder setTableName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; tableName_ = value; onChanged(); return this; } /** * <code>required bytes table_name = 2;</code> */ public Builder clearTableName() { bitField0_ = (bitField0_ & ~0x00000002); tableName_ = getDefaultInstance().getTableName(); onChanged(); return this; } // required bytes encoded_region_name = 3; private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes encoded_region_name = 3;</code> */ public boolean hasEncodedRegionName() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>required bytes encoded_region_name = 3;</code> */ public com.google.protobuf.ByteString getEncodedRegionName() { return encodedRegionName_; } /** * <code>required bytes encoded_region_name = 3;</code> */ public Builder setEncodedRegionName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; encodedRegionName_ = value; onChanged(); return this; } /** * <code>required bytes 
encoded_region_name = 3;</code> */ public Builder clearEncodedRegionName() { bitField0_ = (bitField0_ & ~0x00000004); encodedRegionName_ = getDefaultInstance().getEncodedRegionName(); onChanged(); return this; } // optional uint64 flush_sequence_number = 4; private long flushSequenceNumber_ ; /** * <code>optional uint64 flush_sequence_number = 4;</code> */ public boolean hasFlushSequenceNumber() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional uint64 flush_sequence_number = 4;</code> */ public long getFlushSequenceNumber() { return flushSequenceNumber_; } /** * <code>optional uint64 flush_sequence_number = 4;</code> */ public Builder setFlushSequenceNumber(long value) { bitField0_ |= 0x00000008; flushSequenceNumber_ = value; onChanged(); return this; } /** * <code>optional uint64 flush_sequence_number = 4;</code> */ public Builder clearFlushSequenceNumber() { bitField0_ = (bitField0_ & ~0x00000008); flushSequenceNumber_ = 0L; onChanged(); return this; } // repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor> storeFlushes_ = java.util.Collections.emptyList(); private void ensureStoreFlushesIsMutable() { if (!((bitField0_ & 0x00000010) == 0x00000010)) { storeFlushes_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor>(storeFlushes_); bitField0_ |= 0x00000010; } } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder> storeFlushesBuilder_; /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public 
java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor> getStoreFlushesList() { if (storeFlushesBuilder_ == null) { return java.util.Collections.unmodifiableList(storeFlushes_); } else { return storeFlushesBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public int getStoreFlushesCount() { if (storeFlushesBuilder_ == null) { return storeFlushes_.size(); } else { return storeFlushesBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor getStoreFlushes(int index) { if (storeFlushesBuilder_ == null) { return storeFlushes_.get(index); } else { return storeFlushesBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public Builder setStoreFlushes( int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor value) { if (storeFlushesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureStoreFlushesIsMutable(); storeFlushes_.set(index, value); onChanged(); } else { storeFlushesBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public Builder setStoreFlushes( int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder builderForValue) { if (storeFlushesBuilder_ == null) { ensureStoreFlushesIsMutable(); storeFlushes_.set(index, builderForValue.build()); onChanged(); } else { storeFlushesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public Builder 
addStoreFlushes(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor value) { if (storeFlushesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureStoreFlushesIsMutable(); storeFlushes_.add(value); onChanged(); } else { storeFlushesBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public Builder addStoreFlushes( int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor value) { if (storeFlushesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureStoreFlushesIsMutable(); storeFlushes_.add(index, value); onChanged(); } else { storeFlushesBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public Builder addStoreFlushes( org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder builderForValue) { if (storeFlushesBuilder_ == null) { ensureStoreFlushesIsMutable(); storeFlushes_.add(builderForValue.build()); onChanged(); } else { storeFlushesBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public Builder addStoreFlushes( int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder builderForValue) { if (storeFlushesBuilder_ == null) { ensureStoreFlushesIsMutable(); storeFlushes_.add(index, builderForValue.build()); onChanged(); } else { storeFlushesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public Builder addAllStoreFlushes( java.lang.Iterable<? 
extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor> values) { if (storeFlushesBuilder_ == null) { ensureStoreFlushesIsMutable(); super.addAll(values, storeFlushes_); onChanged(); } else { storeFlushesBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public Builder clearStoreFlushes() { if (storeFlushesBuilder_ == null) { storeFlushes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000010); onChanged(); } else { storeFlushesBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public Builder removeStoreFlushes(int index) { if (storeFlushesBuilder_ == null) { ensureStoreFlushesIsMutable(); storeFlushes_.remove(index); onChanged(); } else { storeFlushesBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder getStoreFlushesBuilder( int index) { return getStoreFlushesFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder getStoreFlushesOrBuilder( int index) { if (storeFlushesBuilder_ == null) { return storeFlushes_.get(index); } else { return storeFlushesBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder> getStoreFlushesOrBuilderList() { if (storeFlushesBuilder_ != null) { return storeFlushesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(storeFlushes_); } } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder addStoreFlushesBuilder() { return getStoreFlushesFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.getDefaultInstance()); } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder addStoreFlushesBuilder( int index) { return getStoreFlushesFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.getDefaultInstance()); } /** * <code>repeated .hbase.pb.FlushDescriptor.StoreFlushDescriptor store_flushes = 5;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder> getStoreFlushesBuilderList() { return getStoreFlushesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder> getStoreFlushesFieldBuilder() { if (storeFlushesBuilder_ == null) { storeFlushesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor, 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptorOrBuilder>( storeFlushes_, ((bitField0_ & 0x00000010) == 0x00000010), getParentForChildren(), isClean()); storeFlushes_ = null; } return storeFlushesBuilder_; } // optional bytes region_name = 6; private com.google.protobuf.ByteString regionName_ = com.google.protobuf.ByteString.EMPTY; /** * <code>optional bytes region_name = 6;</code> * * <pre> * full region name * </pre> */ public boolean hasRegionName() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>optional bytes region_name = 6;</code> * * <pre> * full region name * </pre> */ public com.google.protobuf.ByteString getRegionName() { return regionName_; } /** * <code>optional bytes region_name = 6;</code> * * <pre> * full region name * </pre> */ public Builder setRegionName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000020; regionName_ = value; onChanged(); return this; } /** * <code>optional bytes region_name = 6;</code> * * <pre> * full region name * </pre> */ public Builder clearRegionName() { bitField0_ = (bitField0_ & ~0x00000020); regionName_ = getDefaultInstance().getRegionName(); onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.FlushDescriptor) } static { defaultInstance = new FlushDescriptor(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.FlushDescriptor) } public interface StoreDescriptorOrBuilder extends com.google.protobuf.MessageOrBuilder { // required bytes family_name = 1; /** * <code>required bytes family_name = 1;</code> */ boolean hasFamilyName(); /** * <code>required bytes family_name = 1;</code> */ com.google.protobuf.ByteString getFamilyName(); // required string store_home_dir = 2; /** * <code>required string store_home_dir = 2;</code> * * <pre> 
*relative to region dir * </pre> */ boolean hasStoreHomeDir(); /** * <code>required string store_home_dir = 2;</code> * * <pre> *relative to region dir * </pre> */ java.lang.String getStoreHomeDir(); /** * <code>required string store_home_dir = 2;</code> * * <pre> *relative to region dir * </pre> */ com.google.protobuf.ByteString getStoreHomeDirBytes(); // repeated string store_file = 3; /** * <code>repeated string store_file = 3;</code> * * <pre> * relative to store dir * </pre> */ java.util.List<java.lang.String> getStoreFileList(); /** * <code>repeated string store_file = 3;</code> * * <pre> * relative to store dir * </pre> */ int getStoreFileCount(); /** * <code>repeated string store_file = 3;</code> * * <pre> * relative to store dir * </pre> */ java.lang.String getStoreFile(int index); /** * <code>repeated string store_file = 3;</code> * * <pre> * relative to store dir * </pre> */ com.google.protobuf.ByteString getStoreFileBytes(int index); } /** * Protobuf type {@code hbase.pb.StoreDescriptor} */ public static final class StoreDescriptor extends com.google.protobuf.GeneratedMessage implements StoreDescriptorOrBuilder { // Use StoreDescriptor.newBuilder() to construct. 
private StoreDescriptor(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private StoreDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final StoreDescriptor defaultInstance; public static StoreDescriptor getDefaultInstance() { return defaultInstance; } public StoreDescriptor getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private StoreDescriptor( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { bitField0_ |= 0x00000001; familyName_ = input.readBytes(); break; } case 18: { bitField0_ |= 0x00000002; storeHomeDir_ = input.readBytes(); break; } case 26: { if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { storeFile_ = new com.google.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000004; } storeFile_.add(input.readBytes()); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { storeFile_ = new com.google.protobuf.UnmodifiableLazyStringList(storeFile_); } 
this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_StoreDescriptor_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_StoreDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder.class); } public static com.google.protobuf.Parser<StoreDescriptor> PARSER = new com.google.protobuf.AbstractParser<StoreDescriptor>() { public StoreDescriptor parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new StoreDescriptor(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<StoreDescriptor> getParserForType() { return PARSER; } private int bitField0_; // required bytes family_name = 1; public static final int FAMILY_NAME_FIELD_NUMBER = 1; private com.google.protobuf.ByteString familyName_; /** * <code>required bytes family_name = 1;</code> */ public boolean hasFamilyName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes family_name = 1;</code> */ public com.google.protobuf.ByteString getFamilyName() { return familyName_; } // required string store_home_dir = 2; public static final int STORE_HOME_DIR_FIELD_NUMBER = 2; private java.lang.Object storeHomeDir_; /** * <code>required string store_home_dir = 2;</code> * * <pre> *relative to region dir * </pre> */ public boolean hasStoreHomeDir() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required string store_home_dir = 
2;</code> * * <pre> *relative to region dir * </pre> */ public java.lang.String getStoreHomeDir() { java.lang.Object ref = storeHomeDir_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { storeHomeDir_ = s; } return s; } } /** * <code>required string store_home_dir = 2;</code> * * <pre> *relative to region dir * </pre> */ public com.google.protobuf.ByteString getStoreHomeDirBytes() { java.lang.Object ref = storeHomeDir_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); storeHomeDir_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // repeated string store_file = 3; public static final int STORE_FILE_FIELD_NUMBER = 3; private com.google.protobuf.LazyStringList storeFile_; /** * <code>repeated string store_file = 3;</code> * * <pre> * relative to store dir * </pre> */ public java.util.List<java.lang.String> getStoreFileList() { return storeFile_; } /** * <code>repeated string store_file = 3;</code> * * <pre> * relative to store dir * </pre> */ public int getStoreFileCount() { return storeFile_.size(); } /** * <code>repeated string store_file = 3;</code> * * <pre> * relative to store dir * </pre> */ public java.lang.String getStoreFile(int index) { return storeFile_.get(index); } /** * <code>repeated string store_file = 3;</code> * * <pre> * relative to store dir * </pre> */ public com.google.protobuf.ByteString getStoreFileBytes(int index) { return storeFile_.getByteString(index); } private void initFields() { familyName_ = com.google.protobuf.ByteString.EMPTY; storeHomeDir_ = ""; storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != 
-1) return isInitialized == 1; if (!hasFamilyName()) { memoizedIsInitialized = 0; return false; } if (!hasStoreHomeDir()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, familyName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, getStoreHomeDirBytes()); } for (int i = 0; i < storeFile_.size(); i++) { output.writeBytes(3, storeFile_.getByteString(i)); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, familyName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, getStoreHomeDirBytes()); } { int dataSize = 0; for (int i = 0; i < storeFile_.size(); i++) { dataSize += com.google.protobuf.CodedOutputStream .computeBytesSizeNoTag(storeFile_.getByteString(i)); } size += dataSize; size += 1 * getStoreFileList().size(); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor) obj; boolean result = true; result = result && 
(hasFamilyName() == other.hasFamilyName()); if (hasFamilyName()) { result = result && getFamilyName() .equals(other.getFamilyName()); } result = result && (hasStoreHomeDir() == other.hasStoreHomeDir()); if (hasStoreHomeDir()) { result = result && getStoreHomeDir() .equals(other.getStoreHomeDir()); } result = result && getStoreFileList() .equals(other.getStoreFileList()); result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasFamilyName()) { hash = (37 * hash) + FAMILY_NAME_FIELD_NUMBER; hash = (53 * hash) + getFamilyName().hashCode(); } if (hasStoreHomeDir()) { hash = (37 * hash) + STORE_HOME_DIR_FIELD_NUMBER; hash = (53 * hash) + getStoreHomeDir().hashCode(); } if (getStoreFileCount() > 0) { hash = (37 * hash) + STORE_FILE_FIELD_NUMBER; hash = (53 * hash) + getStoreFileList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.StoreDescriptor} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_StoreDescriptor_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_StoreDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); familyName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); storeHomeDir_ = ""; bitField0_ = (bitField0_ & ~0x00000002); storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_StoreDescriptor_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor build() { org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor buildPartial() { org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.familyName_ = familyName_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.storeHomeDir_ = storeHomeDir_; if (((bitField0_ & 0x00000004) == 0x00000004)) { storeFile_ = new com.google.protobuf.UnmodifiableLazyStringList( storeFile_); bitField0_ = (bitField0_ & ~0x00000004); } result.storeFile_ = storeFile_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor other) { if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.getDefaultInstance()) return this; if (other.hasFamilyName()) { setFamilyName(other.getFamilyName()); } if 
(other.hasStoreHomeDir()) { bitField0_ |= 0x00000002; storeHomeDir_ = other.storeHomeDir_; onChanged(); } if (!other.storeFile_.isEmpty()) { if (storeFile_.isEmpty()) { storeFile_ = other.storeFile_; bitField0_ = (bitField0_ & ~0x00000004); } else { ensureStoreFileIsMutable(); storeFile_.addAll(other.storeFile_); } onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasFamilyName()) { return false; } if (!hasStoreHomeDir()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required bytes family_name = 1; private com.google.protobuf.ByteString familyName_ = com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes family_name = 1;</code> */ public boolean hasFamilyName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required bytes family_name = 1;</code> */ public com.google.protobuf.ByteString getFamilyName() { return familyName_; } /** * <code>required bytes family_name = 1;</code> */ public Builder setFamilyName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; familyName_ = value; onChanged(); return this; } /** * <code>required bytes family_name = 1;</code> */ public Builder clearFamilyName() { bitField0_ = (bitField0_ & ~0x00000001); familyName_ = getDefaultInstance().getFamilyName(); 
onChanged(); return this; } // required string store_home_dir = 2; private java.lang.Object storeHomeDir_ = ""; /** * <code>required string store_home_dir = 2;</code> * * <pre> *relative to region dir * </pre> */ public boolean hasStoreHomeDir() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required string store_home_dir = 2;</code> * * <pre> *relative to region dir * </pre> */ public java.lang.String getStoreHomeDir() { java.lang.Object ref = storeHomeDir_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); storeHomeDir_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>required string store_home_dir = 2;</code> * * <pre> *relative to region dir * </pre> */ public com.google.protobuf.ByteString getStoreHomeDirBytes() { java.lang.Object ref = storeHomeDir_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); storeHomeDir_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>required string store_home_dir = 2;</code> * * <pre> *relative to region dir * </pre> */ public Builder setStoreHomeDir( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; storeHomeDir_ = value; onChanged(); return this; } /** * <code>required string store_home_dir = 2;</code> * * <pre> *relative to region dir * </pre> */ public Builder clearStoreHomeDir() { bitField0_ = (bitField0_ & ~0x00000002); storeHomeDir_ = getDefaultInstance().getStoreHomeDir(); onChanged(); return this; } /** * <code>required string store_home_dir = 2;</code> * * <pre> *relative to region dir * </pre> */ public Builder setStoreHomeDirBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; storeHomeDir_ = value; onChanged(); return this; } // repeated string store_file = 
3; private com.google.protobuf.LazyStringList storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; private void ensureStoreFileIsMutable() { if (!((bitField0_ & 0x00000004) == 0x00000004)) { storeFile_ = new com.google.protobuf.LazyStringArrayList(storeFile_); bitField0_ |= 0x00000004; } } /** * <code>repeated string store_file = 3;</code> * * <pre> * relative to store dir * </pre> */ public java.util.List<java.lang.String> getStoreFileList() { return java.util.Collections.unmodifiableList(storeFile_); } /** * <code>repeated string store_file = 3;</code> * * <pre> * relative to store dir * </pre> */ public int getStoreFileCount() { return storeFile_.size(); } /** * <code>repeated string store_file = 3;</code> * * <pre> * relative to store dir * </pre> */ public java.lang.String getStoreFile(int index) { return storeFile_.get(index); } /** * <code>repeated string store_file = 3;</code> * * <pre> * relative to store dir * </pre> */ public com.google.protobuf.ByteString getStoreFileBytes(int index) { return storeFile_.getByteString(index); } /** * <code>repeated string store_file = 3;</code> * * <pre> * relative to store dir * </pre> */ public Builder setStoreFile( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureStoreFileIsMutable(); storeFile_.set(index, value); onChanged(); return this; } /** * <code>repeated string store_file = 3;</code> * * <pre> * relative to store dir * </pre> */ public Builder addStoreFile( java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureStoreFileIsMutable(); storeFile_.add(value); onChanged(); return this; } /** * <code>repeated string store_file = 3;</code> * * <pre> * relative to store dir * </pre> */ public Builder addAllStoreFile( java.lang.Iterable<java.lang.String> values) { ensureStoreFileIsMutable(); super.addAll(values, storeFile_); onChanged(); return this; } /** * <code>repeated string store_file = 3;</code> * * <pre> * relative to 
store dir * </pre> */ public Builder clearStoreFile() { storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * <code>repeated string store_file = 3;</code> * * <pre> * relative to store dir * </pre> */ public Builder addStoreFileBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureStoreFileIsMutable(); storeFile_.add(value); onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.StoreDescriptor) } static { defaultInstance = new StoreDescriptor(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.StoreDescriptor) } public interface BulkLoadDescriptorOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .hbase.pb.TableName table_name = 1; /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ boolean hasTableName(); /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(); /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); // required bytes encoded_region_name = 2; /** * <code>required bytes encoded_region_name = 2;</code> */ boolean hasEncodedRegionName(); /** * <code>required bytes encoded_region_name = 2;</code> */ com.google.protobuf.ByteString getEncodedRegionName(); // repeated .hbase.pb.StoreDescriptor stores = 3; /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> getStoresList(); /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor getStores(int index); /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ int getStoresCount(); /** * 
<code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> getStoresOrBuilderList(); /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder( int index); // required int64 bulkload_seq_num = 4; /** * <code>required int64 bulkload_seq_num = 4;</code> */ boolean hasBulkloadSeqNum(); /** * <code>required int64 bulkload_seq_num = 4;</code> */ long getBulkloadSeqNum(); } /** * Protobuf type {@code hbase.pb.BulkLoadDescriptor} * * <pre> ** * Special WAL entry used for writing bulk load events to WAL * </pre> */ public static final class BulkLoadDescriptor extends com.google.protobuf.GeneratedMessage implements BulkLoadDescriptorOrBuilder { // Use BulkLoadDescriptor.newBuilder() to construct. private BulkLoadDescriptor(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private BulkLoadDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final BulkLoadDescriptor defaultInstance; public static BulkLoadDescriptor getDefaultInstance() { return defaultInstance; } public BulkLoadDescriptor getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private BulkLoadDescriptor( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = 
input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null; if (((bitField0_ & 0x00000001) == 0x00000001)) { subBuilder = tableName_.toBuilder(); } tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(tableName_); tableName_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000001; break; } case 18: { bitField0_ |= 0x00000002; encodedRegionName_ = input.readBytes(); break; } case 26: { if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { stores_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor>(); mutable_bitField0_ |= 0x00000004; } stores_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.PARSER, extensionRegistry)); break; } case 32: { bitField0_ |= 0x00000004; bulkloadSeqNum_ = input.readInt64(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { stores_ = java.util.Collections.unmodifiableList(stores_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_BulkLoadDescriptor_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_BulkLoadDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor.Builder.class); } public static com.google.protobuf.Parser<BulkLoadDescriptor> PARSER = new com.google.protobuf.AbstractParser<BulkLoadDescriptor>() { public BulkLoadDescriptor parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new BulkLoadDescriptor(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<BulkLoadDescriptor> getParserForType() { return PARSER; } private int bitField0_; // required .hbase.pb.TableName table_name = 1; public static final int TABLE_NAME_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_; /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { return tableName_; } /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { return tableName_; } // required bytes encoded_region_name = 2; public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 2; private com.google.protobuf.ByteString encodedRegionName_; /** * <code>required bytes encoded_region_name = 2;</code> */ public boolean hasEncodedRegionName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes encoded_region_name = 2;</code> */ public com.google.protobuf.ByteString 
getEncodedRegionName() { return encodedRegionName_; } // repeated .hbase.pb.StoreDescriptor stores = 3; public static final int STORES_FIELD_NUMBER = 3; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> stores_; /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> getStoresList() { return stores_; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> getStoresOrBuilderList() { return stores_; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public int getStoresCount() { return stores_.size(); } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor getStores(int index) { return stores_.get(index); } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder( int index) { return stores_.get(index); } // required int64 bulkload_seq_num = 4; public static final int BULKLOAD_SEQ_NUM_FIELD_NUMBER = 4; private long bulkloadSeqNum_; /** * <code>required int64 bulkload_seq_num = 4;</code> */ public boolean hasBulkloadSeqNum() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>required int64 bulkload_seq_num = 4;</code> */ public long getBulkloadSeqNum() { return bulkloadSeqNum_; } private void initFields() { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; stores_ = java.util.Collections.emptyList(); bulkloadSeqNum_ = 0L; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if 
(isInitialized != -1) return isInitialized == 1; if (!hasTableName()) { memoizedIsInitialized = 0; return false; } if (!hasEncodedRegionName()) { memoizedIsInitialized = 0; return false; } if (!hasBulkloadSeqNum()) { memoizedIsInitialized = 0; return false; } if (!getTableName().isInitialized()) { memoizedIsInitialized = 0; return false; } for (int i = 0; i < getStoresCount(); i++) { if (!getStores(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, tableName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, encodedRegionName_); } for (int i = 0; i < stores_.size(); i++) { output.writeMessage(3, stores_.get(i)); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeInt64(4, bulkloadSeqNum_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, tableName_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, encodedRegionName_); } for (int i = 0; i < stores_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(3, stores_.get(i)); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeInt64Size(4, bulkloadSeqNum_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } 
@java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor) obj; boolean result = true; result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { result = result && getTableName() .equals(other.getTableName()); } result = result && (hasEncodedRegionName() == other.hasEncodedRegionName()); if (hasEncodedRegionName()) { result = result && getEncodedRegionName() .equals(other.getEncodedRegionName()); } result = result && getStoresList() .equals(other.getStoresList()); result = result && (hasBulkloadSeqNum() == other.hasBulkloadSeqNum()); if (hasBulkloadSeqNum()) { result = result && (getBulkloadSeqNum() == other.getBulkloadSeqNum()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTableName()) { hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } if (hasEncodedRegionName()) { hash = (37 * hash) + ENCODED_REGION_NAME_FIELD_NUMBER; hash = (53 * hash) + getEncodedRegionName().hashCode(); } if (getStoresCount() > 0) { hash = (37 * hash) + STORES_FIELD_NUMBER; hash = (53 * hash) + getStoresList().hashCode(); } if (hasBulkloadSeqNum()) { hash = (37 * hash) + BULKLOAD_SEQ_NUM_FIELD_NUMBER; hash = (53 * hash) + hashLong(getBulkloadSeqNum()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom( 
com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom( com.google.protobuf.CodedInputStream 
input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.BulkLoadDescriptor} * * <pre> ** * Special WAL entry used for writing bulk load events to WAL * </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptorOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_BulkLoadDescriptor_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_BulkLoadDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getTableNameFieldBuilder(); getStoresFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); if (tableNameBuilder_ == null) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); if (storesBuilder_ == null) { stores_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); } else { storesBuilder_.clear(); } bulkloadSeqNum_ = 0L; bitField0_ = (bitField0_ & ~0x00000008); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_BulkLoadDescriptor_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor build() { org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor buildPartial() { org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor(this); int from_bitField0_ = bitField0_; int 
to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (tableNameBuilder_ == null) { result.tableName_ = tableName_; } else { result.tableName_ = tableNameBuilder_.build(); } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.encodedRegionName_ = encodedRegionName_; if (storesBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004)) { stores_ = java.util.Collections.unmodifiableList(stores_); bitField0_ = (bitField0_ & ~0x00000004); } result.stores_ = stores_; } else { result.stores_ = storesBuilder_.build(); } if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000004; } result.bulkloadSeqNum_ = bulkloadSeqNum_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor other) { if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor.getDefaultInstance()) return this; if (other.hasTableName()) { mergeTableName(other.getTableName()); } if (other.hasEncodedRegionName()) { setEncodedRegionName(other.getEncodedRegionName()); } if (storesBuilder_ == null) { if (!other.stores_.isEmpty()) { if (stores_.isEmpty()) { stores_ = other.stores_; bitField0_ = (bitField0_ & ~0x00000004); } else { ensureStoresIsMutable(); stores_.addAll(other.stores_); } onChanged(); } } else { if (!other.stores_.isEmpty()) { if (storesBuilder_.isEmpty()) { storesBuilder_.dispose(); storesBuilder_ = null; stores_ = other.stores_; bitField0_ = (bitField0_ & ~0x00000004); storesBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders 
? getStoresFieldBuilder() : null; } else { storesBuilder_.addAllMessages(other.stores_); } } } if (other.hasBulkloadSeqNum()) { setBulkloadSeqNum(other.getBulkloadSeqNum()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasTableName()) { return false; } if (!hasEncodedRegionName()) { return false; } if (!hasBulkloadSeqNum()) { return false; } if (!getTableName().isInitialized()) { return false; } for (int i = 0; i < getStoresCount(); i++) { if (!getStores(i).isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .hbase.pb.TableName table_name = 1; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { if (tableNameBuilder_ == null) { return tableName_; } else { return tableNameBuilder_.getMessage(); } } /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (value == null) { throw new NullPointerException(); } tableName_ = value; onChanged(); } else { tableNameBuilder_.setMessage(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public Builder setTableName( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { if (tableNameBuilder_ == null) { tableName_ = builderForValue.build(); onChanged(); } else { tableNameBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { if (tableNameBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); } else { tableName_ = value; } onChanged(); } else { tableNameBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public Builder clearTableName() { if (tableNameBuilder_ == null) { tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); onChanged(); } else { tableNameBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() { bitField0_ |= 0x00000001; onChanged(); return getTableNameFieldBuilder().getBuilder(); } /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { if (tableNameBuilder_ != null) { return tableNameBuilder_.getMessageOrBuilder(); } else { return tableName_; } } /** * <code>required .hbase.pb.TableName table_name = 1;</code> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder() { if (tableNameBuilder_ == null) { tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>( tableName_, getParentForChildren(), isClean()); tableName_ = null; } return tableNameBuilder_; } // required bytes encoded_region_name = 2; private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes encoded_region_name = 2;</code> */ public boolean hasEncodedRegionName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes encoded_region_name = 2;</code> */ public com.google.protobuf.ByteString getEncodedRegionName() { return encodedRegionName_; } /** * <code>required bytes encoded_region_name = 2;</code> */ public Builder setEncodedRegionName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; encodedRegionName_ = value; onChanged(); return this; } /** * <code>required bytes 
encoded_region_name = 2;</code> */ public Builder clearEncodedRegionName() { bitField0_ = (bitField0_ & ~0x00000002); encodedRegionName_ = getDefaultInstance().getEncodedRegionName(); onChanged(); return this; } // repeated .hbase.pb.StoreDescriptor stores = 3; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> stores_ = java.util.Collections.emptyList(); private void ensureStoresIsMutable() { if (!((bitField0_ & 0x00000004) == 0x00000004)) { stores_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor>(stores_); bitField0_ |= 0x00000004; } } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> storesBuilder_; /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> getStoresList() { if (storesBuilder_ == null) { return java.util.Collections.unmodifiableList(stores_); } else { return storesBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public int getStoresCount() { if (storesBuilder_ == null) { return stores_.size(); } else { return storesBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor getStores(int index) { if (storesBuilder_ == null) { return stores_.get(index); } else { return storesBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public Builder setStores( int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor value) { if (storesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } 
ensureStoresIsMutable(); stores_.set(index, value); onChanged(); } else { storesBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public Builder setStores( int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder builderForValue) { if (storesBuilder_ == null) { ensureStoresIsMutable(); stores_.set(index, builderForValue.build()); onChanged(); } else { storesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public Builder addStores(org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor value) { if (storesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureStoresIsMutable(); stores_.add(value); onChanged(); } else { storesBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public Builder addStores( int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor value) { if (storesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureStoresIsMutable(); stores_.add(index, value); onChanged(); } else { storesBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public Builder addStores( org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder builderForValue) { if (storesBuilder_ == null) { ensureStoresIsMutable(); stores_.add(builderForValue.build()); onChanged(); } else { storesBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public Builder addStores( int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder builderForValue) { if (storesBuilder_ == null) { ensureStoresIsMutable(); stores_.add(index, builderForValue.build()); 
onChanged(); } else { storesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public Builder addAllStores( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> values) { if (storesBuilder_ == null) { ensureStoresIsMutable(); super.addAll(values, stores_); onChanged(); } else { storesBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public Builder clearStores() { if (storesBuilder_ == null) { stores_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); } else { storesBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public Builder removeStores(int index) { if (storesBuilder_ == null) { ensureStoresIsMutable(); stores_.remove(index); onChanged(); } else { storesBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder getStoresBuilder( int index) { return getStoresFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder( int index) { if (storesBuilder_ == null) { return stores_.get(index); } else { return storesBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> getStoresOrBuilderList() { if (storesBuilder_ != null) { return storesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(stores_); } } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder addStoresBuilder() { return getStoresFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.getDefaultInstance()); } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder addStoresBuilder( int index) { return getStoresFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.getDefaultInstance()); } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 3;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder> getStoresBuilderList() { return getStoresFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> getStoresFieldBuilder() { if (storesBuilder_ == null) { storesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder>( stores_, ((bitField0_ & 0x00000004) == 0x00000004), getParentForChildren(), isClean()); stores_ = null; } return storesBuilder_; } // required int64 bulkload_seq_num = 4; private long bulkloadSeqNum_ ; /** * <code>required int64 
bulkload_seq_num = 4;</code> */ public boolean hasBulkloadSeqNum() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>required int64 bulkload_seq_num = 4;</code> */ public long getBulkloadSeqNum() { return bulkloadSeqNum_; } /** * <code>required int64 bulkload_seq_num = 4;</code> */ public Builder setBulkloadSeqNum(long value) { bitField0_ |= 0x00000008; bulkloadSeqNum_ = value; onChanged(); return this; } /** * <code>required int64 bulkload_seq_num = 4;</code> */ public Builder clearBulkloadSeqNum() { bitField0_ = (bitField0_ & ~0x00000008); bulkloadSeqNum_ = 0L; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.BulkLoadDescriptor) } static { defaultInstance = new BulkLoadDescriptor(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.BulkLoadDescriptor) } public interface RegionEventDescriptorOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .hbase.pb.RegionEventDescriptor.EventType event_type = 1; /** * <code>required .hbase.pb.RegionEventDescriptor.EventType event_type = 1;</code> */ boolean hasEventType(); /** * <code>required .hbase.pb.RegionEventDescriptor.EventType event_type = 1;</code> */ org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType getEventType(); // required bytes table_name = 2; /** * <code>required bytes table_name = 2;</code> */ boolean hasTableName(); /** * <code>required bytes table_name = 2;</code> */ com.google.protobuf.ByteString getTableName(); // required bytes encoded_region_name = 3; /** * <code>required bytes encoded_region_name = 3;</code> */ boolean hasEncodedRegionName(); /** * <code>required bytes encoded_region_name = 3;</code> */ com.google.protobuf.ByteString getEncodedRegionName(); // optional uint64 log_sequence_number = 4; /** * <code>optional uint64 log_sequence_number = 4;</code> */ boolean hasLogSequenceNumber(); /** * <code>optional uint64 log_sequence_number = 4;</code> */ long 
getLogSequenceNumber(); // repeated .hbase.pb.StoreDescriptor stores = 5; /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> getStoresList(); /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor getStores(int index); /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ int getStoresCount(); /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> getStoresOrBuilderList(); /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder( int index); // optional .hbase.pb.ServerName server = 6; /** * <code>optional .hbase.pb.ServerName server = 6;</code> * * <pre> * Server who opened the region * </pre> */ boolean hasServer(); /** * <code>optional .hbase.pb.ServerName server = 6;</code> * * <pre> * Server who opened the region * </pre> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer(); /** * <code>optional .hbase.pb.ServerName server = 6;</code> * * <pre> * Server who opened the region * </pre> */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder(); // optional bytes region_name = 7; /** * <code>optional bytes region_name = 7;</code> * * <pre> * full region name * </pre> */ boolean hasRegionName(); /** * <code>optional bytes region_name = 7;</code> * * <pre> * full region name * </pre> */ com.google.protobuf.ByteString getRegionName(); } /** * Protobuf type {@code hbase.pb.RegionEventDescriptor} * * <pre> ** * Special WAL entry to hold all related to a region event (open/close). 
* </pre> */ public static final class RegionEventDescriptor extends com.google.protobuf.GeneratedMessage implements RegionEventDescriptorOrBuilder { // Use RegionEventDescriptor.newBuilder() to construct. private RegionEventDescriptor(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private RegionEventDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final RegionEventDescriptor defaultInstance; public static RegionEventDescriptor getDefaultInstance() { return defaultInstance; } public RegionEventDescriptor getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private RegionEventDescriptor( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { int rawValue = input.readEnum(); org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType value = org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(1, rawValue); } else { bitField0_ |= 0x00000001; eventType_ = value; } break; } case 18: { bitField0_ |= 0x00000002; tableName_ = input.readBytes(); break; } case 26: { bitField0_ |= 0x00000004; encodedRegionName_ = input.readBytes(); 
break; } case 32: { bitField0_ |= 0x00000008; logSequenceNumber_ = input.readUInt64(); break; } case 42: { if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { stores_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor>(); mutable_bitField0_ |= 0x00000010; } stores_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.PARSER, extensionRegistry)); break; } case 50: { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null; if (((bitField0_ & 0x00000010) == 0x00000010)) { subBuilder = server_.toBuilder(); } server_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(server_); server_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000010; break; } case 58: { bitField0_ |= 0x00000020; regionName_ = input.readBytes(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) { stores_ = java.util.Collections.unmodifiableList(stores_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_RegionEventDescriptor_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_RegionEventDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.class, 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.Builder.class); } public static com.google.protobuf.Parser<RegionEventDescriptor> PARSER = new com.google.protobuf.AbstractParser<RegionEventDescriptor>() { public RegionEventDescriptor parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new RegionEventDescriptor(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<RegionEventDescriptor> getParserForType() { return PARSER; } /** * Protobuf enum {@code hbase.pb.RegionEventDescriptor.EventType} */ public enum EventType implements com.google.protobuf.ProtocolMessageEnum { /** * <code>REGION_OPEN = 0;</code> */ REGION_OPEN(0, 0), /** * <code>REGION_CLOSE = 1;</code> */ REGION_CLOSE(1, 1), ; /** * <code>REGION_OPEN = 0;</code> */ public static final int REGION_OPEN_VALUE = 0; /** * <code>REGION_CLOSE = 1;</code> */ public static final int REGION_CLOSE_VALUE = 1; public final int getNumber() { return value; } public static EventType valueOf(int value) { switch (value) { case 0: return REGION_OPEN; case 1: return REGION_CLOSE; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<EventType> internalGetValueMap() { return internalValueMap; } private static com.google.protobuf.Internal.EnumLiteMap<EventType> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<EventType>() { public EventType findValueByNumber(int number) { return EventType.valueOf(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(index); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.getDescriptor().getEnumTypes().get(0); } private static final EventType[] VALUES = values(); public static EventType valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int index; private final int value; private EventType(int index, int value) { this.index = index; this.value = value; } // @@protoc_insertion_point(enum_scope:hbase.pb.RegionEventDescriptor.EventType) } private int bitField0_; // required .hbase.pb.RegionEventDescriptor.EventType event_type = 1; public static final int EVENT_TYPE_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType eventType_; /** * <code>required .hbase.pb.RegionEventDescriptor.EventType event_type = 1;</code> */ public boolean hasEventType() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.RegionEventDescriptor.EventType event_type = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType getEventType() { return eventType_; } // required bytes table_name = 2; public static final int TABLE_NAME_FIELD_NUMBER = 2; private com.google.protobuf.ByteString tableName_; /** * <code>required bytes table_name = 2;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes table_name = 2;</code> */ public com.google.protobuf.ByteString getTableName() { return tableName_; } // required bytes encoded_region_name = 3; public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 3; private com.google.protobuf.ByteString encodedRegionName_; /** * <code>required bytes encoded_region_name = 3;</code> */ public boolean hasEncodedRegionName() { return ((bitField0_ & 0x00000004) == 
0x00000004); } /** * <code>required bytes encoded_region_name = 3;</code> */ public com.google.protobuf.ByteString getEncodedRegionName() { return encodedRegionName_; } // optional uint64 log_sequence_number = 4; public static final int LOG_SEQUENCE_NUMBER_FIELD_NUMBER = 4; private long logSequenceNumber_; /** * <code>optional uint64 log_sequence_number = 4;</code> */ public boolean hasLogSequenceNumber() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional uint64 log_sequence_number = 4;</code> */ public long getLogSequenceNumber() { return logSequenceNumber_; } // repeated .hbase.pb.StoreDescriptor stores = 5; public static final int STORES_FIELD_NUMBER = 5; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> stores_; /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> getStoresList() { return stores_; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> getStoresOrBuilderList() { return stores_; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public int getStoresCount() { return stores_.size(); } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor getStores(int index) { return stores_.get(index); } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder( int index) { return stores_.get(index); } // optional .hbase.pb.ServerName server = 6; public static final int SERVER_FIELD_NUMBER = 6; private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_; /** * <code>optional .hbase.pb.ServerName server = 6;</code> * * <pre> * Server who opened the region * </pre> */ public boolean hasServer() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>optional .hbase.pb.ServerName server = 6;</code> * * <pre> * Server who opened the region * </pre> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() { return server_; } /** * <code>optional .hbase.pb.ServerName server = 6;</code> * * <pre> * Server who opened the region * </pre> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { return server_; } // optional bytes region_name = 7; public static final int REGION_NAME_FIELD_NUMBER = 7; private com.google.protobuf.ByteString regionName_; /** * <code>optional bytes region_name = 7;</code> * * <pre> * full region name * </pre> */ public boolean hasRegionName() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>optional bytes region_name = 7;</code> * * <pre> * full region name * </pre> */ public com.google.protobuf.ByteString getRegionName() { return regionName_; } private void 
initFields() { eventType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN; tableName_ = com.google.protobuf.ByteString.EMPTY; encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; logSequenceNumber_ = 0L; stores_ = java.util.Collections.emptyList(); server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); regionName_ = com.google.protobuf.ByteString.EMPTY; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasEventType()) { memoizedIsInitialized = 0; return false; } if (!hasTableName()) { memoizedIsInitialized = 0; return false; } if (!hasEncodedRegionName()) { memoizedIsInitialized = 0; return false; } for (int i = 0; i < getStoresCount(); i++) { if (!getStores(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } if (hasServer()) { if (!getServer().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeEnum(1, eventType_.getNumber()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, tableName_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBytes(3, encodedRegionName_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeUInt64(4, logSequenceNumber_); } for (int i = 0; i < stores_.size(); i++) { output.writeMessage(5, stores_.get(i)); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeMessage(6, server_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { output.writeBytes(7, regionName_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if 
(size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(1, eventType_.getNumber()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, tableName_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(3, encodedRegionName_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(4, logSequenceNumber_); } for (int i = 0; i < stores_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(5, stores_.get(i)); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(6, server_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(7, regionName_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor) obj; boolean result = true; result = result && (hasEventType() == other.hasEventType()); if (hasEventType()) { result = result && (getEventType() == other.getEventType()); } result = result && (hasTableName() == other.hasTableName()); if (hasTableName()) { result = result && getTableName() .equals(other.getTableName()); } result = result && 
(hasEncodedRegionName() == other.hasEncodedRegionName()); if (hasEncodedRegionName()) { result = result && getEncodedRegionName() .equals(other.getEncodedRegionName()); } result = result && (hasLogSequenceNumber() == other.hasLogSequenceNumber()); if (hasLogSequenceNumber()) { result = result && (getLogSequenceNumber() == other.getLogSequenceNumber()); } result = result && getStoresList() .equals(other.getStoresList()); result = result && (hasServer() == other.hasServer()); if (hasServer()) { result = result && getServer() .equals(other.getServer()); } result = result && (hasRegionName() == other.hasRegionName()); if (hasRegionName()) { result = result && getRegionName() .equals(other.getRegionName()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasEventType()) { hash = (37 * hash) + EVENT_TYPE_FIELD_NUMBER; hash = (53 * hash) + hashEnum(getEventType()); } if (hasTableName()) { hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; hash = (53 * hash) + getTableName().hashCode(); } if (hasEncodedRegionName()) { hash = (37 * hash) + ENCODED_REGION_NAME_FIELD_NUMBER; hash = (53 * hash) + getEncodedRegionName().hashCode(); } if (hasLogSequenceNumber()) { hash = (37 * hash) + LOG_SEQUENCE_NUMBER_FIELD_NUMBER; hash = (53 * hash) + hashLong(getLogSequenceNumber()); } if (getStoresCount() > 0) { hash = (37 * hash) + STORES_FIELD_NUMBER; hash = (53 * hash) + getStoresList().hashCode(); } if (hasServer()) { hash = (37 * hash) + SERVER_FIELD_NUMBER; hash = (53 * hash) + getServer().hashCode(); } if (hasRegionName()) { hash = (37 * hash) + REGION_NAME_FIELD_NUMBER; hash = (53 * hash) + getRegionName().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.RegionEventDescriptor} * * <pre> ** * Special WAL entry to hold all related to a region event (open/close). 
* </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptorOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_RegionEventDescriptor_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_RegionEventDescriptor_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getStoresFieldBuilder(); getServerFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); eventType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN; bitField0_ = (bitField0_ & ~0x00000001); tableName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); logSequenceNumber_ = 0L; bitField0_ = (bitField0_ & ~0x00000008); if (storesBuilder_ == null) { stores_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000010); } else { storesBuilder_.clear(); } if (serverBuilder_ == null) { 
server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); } else { serverBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000020); regionName_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000040); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_RegionEventDescriptor_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor build() { org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor buildPartial() { org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.eventType_ = eventType_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.tableName_ = tableName_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.encodedRegionName_ = encodedRegionName_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.logSequenceNumber_ = logSequenceNumber_; if (storesBuilder_ == null) { if (((bitField0_ & 0x00000010) == 0x00000010)) { stores_ = 
java.util.Collections.unmodifiableList(stores_); bitField0_ = (bitField0_ & ~0x00000010); } result.stores_ = stores_; } else { result.stores_ = storesBuilder_.build(); } if (((from_bitField0_ & 0x00000020) == 0x00000020)) { to_bitField0_ |= 0x00000010; } if (serverBuilder_ == null) { result.server_ = server_; } else { result.server_ = serverBuilder_.build(); } if (((from_bitField0_ & 0x00000040) == 0x00000040)) { to_bitField0_ |= 0x00000020; } result.regionName_ = regionName_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor other) { if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.getDefaultInstance()) return this; if (other.hasEventType()) { setEventType(other.getEventType()); } if (other.hasTableName()) { setTableName(other.getTableName()); } if (other.hasEncodedRegionName()) { setEncodedRegionName(other.getEncodedRegionName()); } if (other.hasLogSequenceNumber()) { setLogSequenceNumber(other.getLogSequenceNumber()); } if (storesBuilder_ == null) { if (!other.stores_.isEmpty()) { if (stores_.isEmpty()) { stores_ = other.stores_; bitField0_ = (bitField0_ & ~0x00000010); } else { ensureStoresIsMutable(); stores_.addAll(other.stores_); } onChanged(); } } else { if (!other.stores_.isEmpty()) { if (storesBuilder_.isEmpty()) { storesBuilder_.dispose(); storesBuilder_ = null; stores_ = other.stores_; bitField0_ = (bitField0_ & ~0x00000010); storesBuilder_ = com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
getStoresFieldBuilder() : null; } else { storesBuilder_.addAllMessages(other.stores_); } } } if (other.hasServer()) { mergeServer(other.getServer()); } if (other.hasRegionName()) { setRegionName(other.getRegionName()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasEventType()) { return false; } if (!hasTableName()) { return false; } if (!hasEncodedRegionName()) { return false; } for (int i = 0; i < getStoresCount(); i++) { if (!getStores(i).isInitialized()) { return false; } } if (hasServer()) { if (!getServer().isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .hbase.pb.RegionEventDescriptor.EventType event_type = 1; private org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType eventType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN; /** * <code>required .hbase.pb.RegionEventDescriptor.EventType event_type = 1;</code> */ public boolean hasEventType() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .hbase.pb.RegionEventDescriptor.EventType event_type = 1;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType getEventType() { return eventType_; } /** * <code>required 
.hbase.pb.RegionEventDescriptor.EventType event_type = 1;</code> */ public Builder setEventType(org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; eventType_ = value; onChanged(); return this; } /** * <code>required .hbase.pb.RegionEventDescriptor.EventType event_type = 1;</code> */ public Builder clearEventType() { bitField0_ = (bitField0_ & ~0x00000001); eventType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN; onChanged(); return this; } // required bytes table_name = 2; private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes table_name = 2;</code> */ public boolean hasTableName() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required bytes table_name = 2;</code> */ public com.google.protobuf.ByteString getTableName() { return tableName_; } /** * <code>required bytes table_name = 2;</code> */ public Builder setTableName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; tableName_ = value; onChanged(); return this; } /** * <code>required bytes table_name = 2;</code> */ public Builder clearTableName() { bitField0_ = (bitField0_ & ~0x00000002); tableName_ = getDefaultInstance().getTableName(); onChanged(); return this; } // required bytes encoded_region_name = 3; private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; /** * <code>required bytes encoded_region_name = 3;</code> */ public boolean hasEncodedRegionName() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>required bytes encoded_region_name = 3;</code> */ public com.google.protobuf.ByteString getEncodedRegionName() { return encodedRegionName_; } /** * <code>required bytes encoded_region_name = 3;</code> */ public Builder 
setEncodedRegionName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; encodedRegionName_ = value; onChanged(); return this; } /** * <code>required bytes encoded_region_name = 3;</code> */ public Builder clearEncodedRegionName() { bitField0_ = (bitField0_ & ~0x00000004); encodedRegionName_ = getDefaultInstance().getEncodedRegionName(); onChanged(); return this; } // optional uint64 log_sequence_number = 4; private long logSequenceNumber_ ; /** * <code>optional uint64 log_sequence_number = 4;</code> */ public boolean hasLogSequenceNumber() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>optional uint64 log_sequence_number = 4;</code> */ public long getLogSequenceNumber() { return logSequenceNumber_; } /** * <code>optional uint64 log_sequence_number = 4;</code> */ public Builder setLogSequenceNumber(long value) { bitField0_ |= 0x00000008; logSequenceNumber_ = value; onChanged(); return this; } /** * <code>optional uint64 log_sequence_number = 4;</code> */ public Builder clearLogSequenceNumber() { bitField0_ = (bitField0_ & ~0x00000008); logSequenceNumber_ = 0L; onChanged(); return this; } // repeated .hbase.pb.StoreDescriptor stores = 5; private java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> stores_ = java.util.Collections.emptyList(); private void ensureStoresIsMutable() { if (!((bitField0_ & 0x00000010) == 0x00000010)) { stores_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor>(stores_); bitField0_ |= 0x00000010; } } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> storesBuilder_; /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public 
java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> getStoresList() { if (storesBuilder_ == null) { return java.util.Collections.unmodifiableList(stores_); } else { return storesBuilder_.getMessageList(); } } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public int getStoresCount() { if (storesBuilder_ == null) { return stores_.size(); } else { return storesBuilder_.getCount(); } } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor getStores(int index) { if (storesBuilder_ == null) { return stores_.get(index); } else { return storesBuilder_.getMessage(index); } } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public Builder setStores( int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor value) { if (storesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureStoresIsMutable(); stores_.set(index, value); onChanged(); } else { storesBuilder_.setMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public Builder setStores( int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder builderForValue) { if (storesBuilder_ == null) { ensureStoresIsMutable(); stores_.set(index, builderForValue.build()); onChanged(); } else { storesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public Builder addStores(org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor value) { if (storesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureStoresIsMutable(); stores_.add(value); onChanged(); } else { storesBuilder_.addMessage(value); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public Builder addStores( int 
index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor value) { if (storesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureStoresIsMutable(); stores_.add(index, value); onChanged(); } else { storesBuilder_.addMessage(index, value); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public Builder addStores( org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder builderForValue) { if (storesBuilder_ == null) { ensureStoresIsMutable(); stores_.add(builderForValue.build()); onChanged(); } else { storesBuilder_.addMessage(builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public Builder addStores( int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder builderForValue) { if (storesBuilder_ == null) { ensureStoresIsMutable(); stores_.add(index, builderForValue.build()); onChanged(); } else { storesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public Builder addAllStores( java.lang.Iterable<? 
extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor> values) { if (storesBuilder_ == null) { ensureStoresIsMutable(); super.addAll(values, stores_); onChanged(); } else { storesBuilder_.addAllMessages(values); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public Builder clearStores() { if (storesBuilder_ == null) { stores_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000010); onChanged(); } else { storesBuilder_.clear(); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public Builder removeStores(int index) { if (storesBuilder_ == null) { ensureStoresIsMutable(); stores_.remove(index); onChanged(); } else { storesBuilder_.remove(index); } return this; } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder getStoresBuilder( int index) { return getStoresFieldBuilder().getBuilder(index); } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder getStoresOrBuilder( int index) { if (storesBuilder_ == null) { return stores_.get(index); } else { return storesBuilder_.getMessageOrBuilder(index); } } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> getStoresOrBuilderList() { if (storesBuilder_ != null) { return storesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(stores_); } } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder addStoresBuilder() { return getStoresFieldBuilder().addBuilder( org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.getDefaultInstance()); } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder addStoresBuilder( int index) { return getStoresFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.getDefaultInstance()); } /** * <code>repeated .hbase.pb.StoreDescriptor stores = 5;</code> */ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder> getStoresBuilderList() { return getStoresFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder> getStoresFieldBuilder() { if (storesBuilder_ == null) { storesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptorOrBuilder>( stores_, ((bitField0_ & 0x00000010) == 0x00000010), getParentForChildren(), isClean()); stores_ = null; } return storesBuilder_; } // optional .hbase.pb.ServerName server = 6; private 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_; /** * <code>optional .hbase.pb.ServerName server = 6;</code> * * <pre> * Server who opened the region * </pre> */ public boolean hasServer() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>optional .hbase.pb.ServerName server = 6;</code> * * <pre> * Server who opened the region * </pre> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() { if (serverBuilder_ == null) { return server_; } else { return serverBuilder_.getMessage(); } } /** * <code>optional .hbase.pb.ServerName server = 6;</code> * * <pre> * Server who opened the region * </pre> */ public Builder setServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if (serverBuilder_ == null) { if (value == null) { throw new NullPointerException(); } server_ = value; onChanged(); } else { serverBuilder_.setMessage(value); } bitField0_ |= 0x00000020; return this; } /** * <code>optional .hbase.pb.ServerName server = 6;</code> * * <pre> * Server who opened the region * </pre> */ public Builder setServer( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { if (serverBuilder_ == null) { server_ = builderForValue.build(); onChanged(); } else { serverBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000020; return this; } /** * <code>optional .hbase.pb.ServerName server = 6;</code> * * <pre> * Server who opened the region * </pre> */ public Builder mergeServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { if 
(serverBuilder_ == null) { if (((bitField0_ & 0x00000020) == 0x00000020) && server_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(server_).mergeFrom(value).buildPartial(); } else { server_ = value; } onChanged(); } else { serverBuilder_.mergeFrom(value); } bitField0_ |= 0x00000020; return this; } /** * <code>optional .hbase.pb.ServerName server = 6;</code> * * <pre> * Server who opened the region * </pre> */ public Builder clearServer() { if (serverBuilder_ == null) { server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); onChanged(); } else { serverBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000020); return this; } /** * <code>optional .hbase.pb.ServerName server = 6;</code> * * <pre> * Server who opened the region * </pre> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerBuilder() { bitField0_ |= 0x00000020; onChanged(); return getServerFieldBuilder().getBuilder(); } /** * <code>optional .hbase.pb.ServerName server = 6;</code> * * <pre> * Server who opened the region * </pre> */ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() { if (serverBuilder_ != null) { return serverBuilder_.getMessageOrBuilder(); } else { return server_; } } /** * <code>optional .hbase.pb.ServerName server = 6;</code> * * <pre> * Server who opened the region * </pre> */ private com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getServerFieldBuilder() { if (serverBuilder_ == null) { serverBuilder_ = new com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( server_, getParentForChildren(), isClean()); server_ = null; } return serverBuilder_; } // optional bytes region_name = 7; private com.google.protobuf.ByteString regionName_ = com.google.protobuf.ByteString.EMPTY; /** * <code>optional bytes region_name = 7;</code> * * <pre> * full region name * </pre> */ public boolean hasRegionName() { return ((bitField0_ & 0x00000040) == 0x00000040); } /** * <code>optional bytes region_name = 7;</code> * * <pre> * full region name * </pre> */ public com.google.protobuf.ByteString getRegionName() { return regionName_; } /** * <code>optional bytes region_name = 7;</code> * * <pre> * full region name * </pre> */ public Builder setRegionName(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000040; regionName_ = value; onChanged(); return this; } /** * <code>optional bytes region_name = 7;</code> * * <pre> * full region name * </pre> */ public Builder clearRegionName() { bitField0_ = (bitField0_ & ~0x00000040); regionName_ = getDefaultInstance().getRegionName(); onChanged(); return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.RegionEventDescriptor) } static { defaultInstance = new RegionEventDescriptor(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.RegionEventDescriptor) } public interface WALTrailerOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.WALTrailer} * * <pre> ** * A trailer that is appended to the end of a properly closed WAL file. * If missing, this is either a legacy or a corrupted WAL file. * N.B. This trailer currently doesn't contain any information and we * purposefully don't expose it in the WAL APIs. It's for future growth. 
* </pre> */ public static final class WALTrailer extends com.google.protobuf.GeneratedMessage implements WALTrailerOrBuilder { // Use WALTrailer.newBuilder() to construct. private WALTrailer(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private WALTrailer(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final WALTrailer defaultInstance; public static WALTrailer getDefaultInstance() { return defaultInstance; } public WALTrailer getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private WALTrailer( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALTrailer_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALTrailer_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer.Builder.class); } public static com.google.protobuf.Parser<WALTrailer> PARSER = new com.google.protobuf.AbstractParser<WALTrailer>() { public WALTrailer parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new WALTrailer(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<WALTrailer> getParserForType() { return PARSER; } private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer) obj; boolean result = true; result = 
result && getUnknownFields().equals(other.getUnknownFields()); return result; } private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } 
public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code hbase.pb.WALTrailer} * * <pre> ** * A trailer that is appended to the end of a properly closed WAL file. * If missing, this is either a legacy or a corrupted WAL file. * N.B. This trailer currently doesn't contain any information and we * purposefully don't expose it in the WAL APIs. It's for future growth. 
* </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailerOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALTrailer_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALTrailer_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer.Builder.class); } // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_hbase_pb_WALTrailer_descriptor; } public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer build() { org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer result = buildPartial(); if (!result.isInitialized()) { throw 
newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer buildPartial() { org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer other) { if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } // @@protoc_insertion_point(builder_scope:hbase.pb.WALTrailer) } static { defaultInstance = new WALTrailer(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:hbase.pb.WALTrailer) } private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_WALHeader_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_WALHeader_fieldAccessorTable; private static 
com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_WALKey_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_WALKey_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FamilyScope_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_FamilyScope_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_CompactionDescriptor_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_CompactionDescriptor_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FlushDescriptor_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_FlushDescriptor_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_StoreDescriptor_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_StoreDescriptor_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_BulkLoadDescriptor_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_BulkLoadDescriptor_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RegionEventDescriptor_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_RegionEventDescriptor_fieldAccessorTable; private static 
com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_WALTrailer_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_WALTrailer_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\tWAL.proto\022\010hbase.pb\032\013HBase.proto\032\014Clie" + "nt.proto\"\217\001\n\tWALHeader\022\027\n\017has_compressio" + "n\030\001 \001(\010\022\026\n\016encryption_key\030\002 \001(\014\022\033\n\023has_t" + "ag_compression\030\003 \001(\010\022\027\n\017writer_cls_name\030" + "\004 \001(\t\022\033\n\023cell_codec_cls_name\030\005 \001(\t\"\273\002\n\006W" + "ALKey\022\033\n\023encoded_region_name\030\001 \002(\014\022\022\n\nta" + "ble_name\030\002 \002(\014\022\033\n\023log_sequence_number\030\003 " + "\002(\004\022\022\n\nwrite_time\030\004 \002(\004\022&\n\ncluster_id\030\005 " + "\001(\0132\016.hbase.pb.UUIDB\002\030\001\022%\n\006scopes\030\006 \003(\0132" + "\025.hbase.pb.FamilyScope\022\032\n\022following_kv_c", "ount\030\007 \001(\r\022#\n\013cluster_ids\030\010 \003(\0132\016.hbase." 
+ "pb.UUID\022\022\n\nnonceGroup\030\t \001(\004\022\r\n\005nonce\030\n \001" + "(\004\022\034\n\024orig_sequence_number\030\013 \001(\004\"F\n\013Fami" + "lyScope\022\016\n\006family\030\001 \002(\014\022\'\n\nscope_type\030\002 " + "\002(\0162\023.hbase.pb.ScopeType\"\276\001\n\024CompactionD" + "escriptor\022\022\n\ntable_name\030\001 \002(\014\022\033\n\023encoded" + "_region_name\030\002 \002(\014\022\023\n\013family_name\030\003 \002(\014\022" + "\030\n\020compaction_input\030\004 \003(\t\022\031\n\021compaction_" + "output\030\005 \003(\t\022\026\n\016store_home_dir\030\006 \002(\t\022\023\n\013" + "region_name\030\007 \001(\014\"\244\003\n\017FlushDescriptor\0225\n", "\006action\030\001 \002(\0162%.hbase.pb.FlushDescriptor" + ".FlushAction\022\022\n\ntable_name\030\002 \002(\014\022\033\n\023enco" + "ded_region_name\030\003 \002(\014\022\035\n\025flush_sequence_" + "number\030\004 \001(\004\022E\n\rstore_flushes\030\005 \003(\0132..hb" + "ase.pb.FlushDescriptor.StoreFlushDescrip" + "tor\022\023\n\013region_name\030\006 \001(\014\032Y\n\024StoreFlushDe" + "scriptor\022\023\n\013family_name\030\001 \002(\014\022\026\n\016store_h" + "ome_dir\030\002 \002(\t\022\024\n\014flush_output\030\003 \003(\t\"S\n\013F" + "lushAction\022\017\n\013START_FLUSH\020\000\022\020\n\014COMMIT_FL" + "USH\020\001\022\017\n\013ABORT_FLUSH\020\002\022\020\n\014CANNOT_FLUSH\020\003", "\"R\n\017StoreDescriptor\022\023\n\013family_name\030\001 \002(\014" + "\022\026\n\016store_home_dir\030\002 \002(\t\022\022\n\nstore_file\030\003" + " \003(\t\"\237\001\n\022BulkLoadDescriptor\022\'\n\ntable_nam" + "e\030\001 \002(\0132\023.hbase.pb.TableName\022\033\n\023encoded_" + "region_name\030\002 \002(\014\022)\n\006stores\030\003 \003(\0132\031.hbas" + "e.pb.StoreDescriptor\022\030\n\020bulkload_seq_num" + "\030\004 \002(\003\"\272\002\n\025RegionEventDescriptor\022=\n\neven" + "t_type\030\001 \002(\0162).hbase.pb.RegionEventDescr" + "iptor.EventType\022\022\n\ntable_name\030\002 
\002(\014\022\033\n\023e" + "ncoded_region_name\030\003 \002(\014\022\033\n\023log_sequence", "_number\030\004 \001(\004\022)\n\006stores\030\005 \003(\0132\031.hbase.pb" + ".StoreDescriptor\022$\n\006server\030\006 \001(\0132\024.hbase" + ".pb.ServerName\022\023\n\013region_name\030\007 \001(\014\".\n\tE" + "ventType\022\017\n\013REGION_OPEN\020\000\022\020\n\014REGION_CLOS" + "E\020\001\"\014\n\nWALTrailer*F\n\tScopeType\022\033\n\027REPLIC" + "ATION_SCOPE_LOCAL\020\000\022\034\n\030REPLICATION_SCOPE" + "_GLOBAL\020\001B?\n*org.apache.hadoop.hbase.pro" + "tobuf.generatedB\tWALProtosH\001\210\001\000\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { public com.google.protobuf.ExtensionRegistry assignDescriptors( com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; internal_static_hbase_pb_WALHeader_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_hbase_pb_WALHeader_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_WALHeader_descriptor, new java.lang.String[] { "HasCompression", "EncryptionKey", "HasTagCompression", "WriterClsName", "CellCodecClsName", }); internal_static_hbase_pb_WALKey_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_hbase_pb_WALKey_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_WALKey_descriptor, new java.lang.String[] { "EncodedRegionName", "TableName", "LogSequenceNumber", "WriteTime", "ClusterId", "Scopes", "FollowingKvCount", "ClusterIds", "NonceGroup", "Nonce", "OrigSequenceNumber", }); internal_static_hbase_pb_FamilyScope_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_hbase_pb_FamilyScope_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( 
internal_static_hbase_pb_FamilyScope_descriptor, new java.lang.String[] { "Family", "ScopeType", }); internal_static_hbase_pb_CompactionDescriptor_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_hbase_pb_CompactionDescriptor_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_CompactionDescriptor_descriptor, new java.lang.String[] { "TableName", "EncodedRegionName", "FamilyName", "CompactionInput", "CompactionOutput", "StoreHomeDir", "RegionName", }); internal_static_hbase_pb_FlushDescriptor_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_hbase_pb_FlushDescriptor_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_FlushDescriptor_descriptor, new java.lang.String[] { "Action", "TableName", "EncodedRegionName", "FlushSequenceNumber", "StoreFlushes", "RegionName", }); internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_descriptor = internal_static_hbase_pb_FlushDescriptor_descriptor.getNestedTypes().get(0); internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_FlushDescriptor_StoreFlushDescriptor_descriptor, new java.lang.String[] { "FamilyName", "StoreHomeDir", "FlushOutput", }); internal_static_hbase_pb_StoreDescriptor_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_hbase_pb_StoreDescriptor_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_StoreDescriptor_descriptor, new java.lang.String[] { "FamilyName", "StoreHomeDir", "StoreFile", }); internal_static_hbase_pb_BulkLoadDescriptor_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_hbase_pb_BulkLoadDescriptor_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( 
internal_static_hbase_pb_BulkLoadDescriptor_descriptor, new java.lang.String[] { "TableName", "EncodedRegionName", "Stores", "BulkloadSeqNum", }); internal_static_hbase_pb_RegionEventDescriptor_descriptor = getDescriptor().getMessageTypes().get(7); internal_static_hbase_pb_RegionEventDescriptor_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_RegionEventDescriptor_descriptor, new java.lang.String[] { "EventType", "TableName", "EncodedRegionName", "LogSequenceNumber", "Stores", "Server", "RegionName", }); internal_static_hbase_pb_WALTrailer_descriptor = getDescriptor().getMessageTypes().get(8); internal_static_hbase_pb_WALTrailer_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_WALTrailer_descriptor, new java.lang.String[] { }); return null; } }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(), }, assigner); } // @@protoc_insertion_point(outer_class_scope) }
apache-2.0
peterzhu1688/mybatis-generator-core-1.3.5-fix
src/main/java/org/mybatis/generator/plugins/ToStringPlugin.java
4090
/** * Copyright ${license.git.copyrightYears} the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.mybatis.generator.plugins; import static org.mybatis.generator.internal.util.StringUtility.isTrue; import java.util.List; import java.util.Properties; import org.mybatis.generator.api.IntrospectedTable; import org.mybatis.generator.api.PluginAdapter; import org.mybatis.generator.api.dom.java.Field; import org.mybatis.generator.api.dom.java.FullyQualifiedJavaType; import org.mybatis.generator.api.dom.java.JavaVisibility; import org.mybatis.generator.api.dom.java.Method; import org.mybatis.generator.api.dom.java.TopLevelClass; public class ToStringPlugin extends PluginAdapter { private boolean useToStringFromRoot; @Override public void setProperties(Properties properties) { super.setProperties(properties); useToStringFromRoot = isTrue(properties.getProperty("useToStringFromRoot")); } public boolean validate(List<String> warnings) { return true; } @Override public boolean modelBaseRecordClassGenerated(TopLevelClass topLevelClass, IntrospectedTable introspectedTable) { generateToString(introspectedTable, topLevelClass); return true; } @Override public boolean modelRecordWithBLOBsClassGenerated( TopLevelClass topLevelClass, IntrospectedTable introspectedTable) { generateToString(introspectedTable, topLevelClass); return true; } @Override public boolean modelPrimaryKeyClassGenerated(TopLevelClass topLevelClass, IntrospectedTable introspectedTable) 
{ generateToString(introspectedTable, topLevelClass); return true; } private void generateToString(IntrospectedTable introspectedTable, TopLevelClass topLevelClass) { Method method = new Method(); method.setVisibility(JavaVisibility.PUBLIC); method.setReturnType(FullyQualifiedJavaType.getStringInstance()); method.setName("toString"); //$NON-NLS-1$ if (introspectedTable.isJava5Targeted()) { method.addAnnotation("@Override"); //$NON-NLS-1$ } context.getCommentGenerator().addGeneralMethodComment(method, introspectedTable); method.addBodyLine("StringBuilder sb = new StringBuilder();"); //$NON-NLS-1$ method.addBodyLine("sb.append(getClass().getSimpleName());"); //$NON-NLS-1$ method.addBodyLine("sb.append(\" [\");"); //$NON-NLS-1$ method.addBodyLine("sb.append(\"Hash = \").append(hashCode());"); //$NON-NLS-1$ StringBuilder sb = new StringBuilder(); for (Field field : topLevelClass.getFields()) { String property = field.getName(); sb.setLength(0); sb.append("sb.append(\"").append(", ").append(property) //$NON-NLS-1$ //$NON-NLS-2$ .append("=\")").append(".append(").append(property) //$NON-NLS-1$ //$NON-NLS-2$ .append(");"); //$NON-NLS-1$ method.addBodyLine(sb.toString()); } method.addBodyLine("sb.append(\"]\");"); //$NON-NLS-1$ if (useToStringFromRoot && topLevelClass.getSuperClass() != null) { method.addBodyLine("sb.append(\", from super class \");"); //$NON-NLS-1$ method.addBodyLine("sb.append(super.toString());"); //$NON-NLS-1$ } method.addBodyLine("return sb.toString();"); //$NON-NLS-1$ topLevelClass.addMethod(method); } }
apache-2.0
dgutierr/kie-wb-common
kie-wb-common-widgets/kie-wb-decorated-grid-widget/src/main/java/org/kie/workbench/common/widgets/decoratedgrid/client/widget/cells/ProxyPopupDropDown.java
2376
/* * Copyright 2014 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.workbench.common.widgets.decoratedgrid.client.widget.cells; import com.google.gwt.cell.client.Cell; import com.google.gwt.dom.client.Element; import com.google.gwt.safehtml.shared.SafeHtmlBuilder; import com.google.gwt.text.shared.SafeHtmlRenderer; import com.google.gwt.user.client.ui.IsWidget; import org.drools.workbench.models.datamodel.oracle.DropDownData; /** * Definition of Cell that can be used by AbstractProxyPopupDropDownEditCell. */ public interface ProxyPopupDropDown<C> extends IsWidget { /** * Set value for cell * @param value */ void setValue( final C value ); /** * Set values for cell * @param dd */ void setDropDownData( final DropDownData dd ); /** * Render value as safe HTML * @param context * @param value * @param sb * @param renderer */ void render( final Cell.Context context, final C value, final SafeHtmlBuilder sb, final SafeHtmlRenderer<String> renderer ); /** * Return the new value entered within the cell * @return new value */ C getValue(); /** * Initiate editing within the "Popup". 
Implementations should populate the * child controls within the "Popup" before showing the Popup * <code>panel</code> * @param context * @param parent * @param value */ void startEditing( final Cell.Context context, final Element parent, final C value ); /** * Set focus to widget * @param focused */ void setFocus( final boolean focused ); String convertToString( final C value ); C convertFromString( final String value ); }
apache-2.0
zhan-xiong/buck
test/com/facebook/buck/artifact_cache/ArtifactCachesTest.java
9894
/* * Copyright 2015-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.artifact_cache; import static org.junit.Assert.assertThat; import com.facebook.buck.event.BuckEventBus; import com.facebook.buck.event.BuckEventBusForTests; import com.facebook.buck.io.ProjectFilesystem; import com.facebook.buck.testutil.FakeProjectFilesystem; import com.facebook.buck.testutil.integration.TemporaryPaths; import com.google.common.util.concurrent.MoreExecutors; import java.nio.file.Paths; import java.util.Optional; import org.hamcrest.Matchers; import org.junit.Rule; import org.junit.Test; public class ArtifactCachesTest { @Rule public TemporaryPaths tempDir = new TemporaryPaths(); @Test public void testCreateHttpCacheOnly() throws Exception { ArtifactCacheBuckConfig cacheConfig = ArtifactCacheBuckConfigTest.createFromText("[cache]", "mode = http"); ProjectFilesystem projectFilesystem = new FakeProjectFilesystem(); BuckEventBus buckEventBus = BuckEventBusForTests.newInstance(); ArtifactCache artifactCache = new ArtifactCaches( cacheConfig, buckEventBus, projectFilesystem, Optional.empty(), MoreExecutors.newDirectExecutorService(), Optional.empty()) .newInstance(); assertThat(stripDecorators(artifactCache), Matchers.instanceOf(HttpArtifactCache.class)); } @Test public void testCreateDirCacheOnly() throws Exception { ArtifactCacheBuckConfig cacheConfig = ArtifactCacheBuckConfigTest.createFromText("[cache]", "mode = dir"); ProjectFilesystem 
projectFilesystem = new FakeProjectFilesystem(); BuckEventBus buckEventBus = BuckEventBusForTests.newInstance(); ArtifactCache artifactCache = new ArtifactCaches( cacheConfig, buckEventBus, projectFilesystem, Optional.empty(), MoreExecutors.newDirectExecutorService(), Optional.empty()) .newInstance(); assertThat(stripDecorators(artifactCache), Matchers.instanceOf(DirArtifactCache.class)); } @Test public void testCreateSQLiteCacheOnly() throws Exception { ArtifactCacheBuckConfig cacheConfig = ArtifactCacheBuckConfigTest.createFromText( "[cache]", "mode = sqlite", "sqlite_cache_names = name1"); ProjectFilesystem projectFilesystem = new ProjectFilesystem(tempDir.getRoot()); BuckEventBus buckEventBus = BuckEventBusForTests.newInstance(); ArtifactCache artifactCache = new ArtifactCaches( cacheConfig, buckEventBus, projectFilesystem, Optional.empty(), MoreExecutors.newDirectExecutorService(), Optional.empty()) .newInstance(); assertThat(stripDecorators(artifactCache), Matchers.instanceOf(SQLiteArtifactCache.class)); } @Test public void testCreateMultipleDirCaches() throws Exception { ArtifactCacheBuckConfig cacheConfig = ArtifactCacheBuckConfigTest.createFromText( "[cache]", "dir_cache_names = dir1, dir2", "[cache#dir1]", "dir = dir1", "dir_mode = readwrite", "[cache#dir2]", "dir = dir2", "dir_mode = readonly"); ProjectFilesystem projectFilesystem = new FakeProjectFilesystem(); BuckEventBus buckEventBus = BuckEventBusForTests.newInstance(); ArtifactCache artifactCache = stripDecorators( new ArtifactCaches( cacheConfig, buckEventBus, projectFilesystem, Optional.empty(), MoreExecutors.newDirectExecutorService(), Optional.empty()) .newInstance()); assertThat(artifactCache, Matchers.instanceOf(MultiArtifactCache.class)); MultiArtifactCache multiArtifactCache = (MultiArtifactCache) artifactCache; assertThat(multiArtifactCache.getArtifactCaches().size(), Matchers.equalTo(2)); ArtifactCache c1 = stripDecorators(multiArtifactCache.getArtifactCaches().get(0)); ArtifactCache c2 = 
stripDecorators(multiArtifactCache.getArtifactCaches().get(1)); assertThat(c1, Matchers.instanceOf(DirArtifactCache.class)); assertThat(c2, Matchers.instanceOf(DirArtifactCache.class)); DirArtifactCache dir1 = (DirArtifactCache) c1; assertThat(dir1.getCacheDir(), Matchers.equalTo(Paths.get("dir1").toAbsolutePath())); assertThat(dir1.getCacheReadMode(), Matchers.equalTo(CacheReadMode.READWRITE)); DirArtifactCache dir2 = (DirArtifactCache) c2; assertThat(dir2.getCacheDir(), Matchers.equalTo(Paths.get("dir2").toAbsolutePath())); assertThat(dir2.getCacheReadMode(), Matchers.equalTo(CacheReadMode.READONLY)); } @Test public void testCreateMultipleSQLiteCaches() throws Exception { ArtifactCacheBuckConfig cacheConfig = ArtifactCacheBuckConfigTest.createFromText( "[cache]", "mode = sqlite", "sqlite_cache_names = name1, name2, name3", "[cache#name1]", "[cache#name2]", "sqlite_mode = readwrite", "[cache#name3]", "sqlite_mode = readonly"); ProjectFilesystem projectFilesystem = new ProjectFilesystem(tempDir.getRoot()); BuckEventBus buckEventBus = BuckEventBusForTests.newInstance(); ArtifactCache artifactCache = stripDecorators( new ArtifactCaches( cacheConfig, buckEventBus, projectFilesystem, Optional.empty(), MoreExecutors.newDirectExecutorService(), Optional.empty()) .newInstance()); assertThat(artifactCache, Matchers.instanceOf(MultiArtifactCache.class)); MultiArtifactCache multiArtifactCache = (MultiArtifactCache) artifactCache; assertThat(multiArtifactCache.getArtifactCaches().size(), Matchers.equalTo(3)); ArtifactCache c1 = stripDecorators(multiArtifactCache.getArtifactCaches().get(0)); ArtifactCache c2 = stripDecorators(multiArtifactCache.getArtifactCaches().get(1)); ArtifactCache c3 = stripDecorators(multiArtifactCache.getArtifactCaches().get(2)); assertThat(c1, Matchers.instanceOf(SQLiteArtifactCache.class)); assertThat(c2, Matchers.instanceOf(SQLiteArtifactCache.class)); assertThat(c3, Matchers.instanceOf(SQLiteArtifactCache.class)); SQLiteArtifactCache cache1 = 
(SQLiteArtifactCache) c1; assertThat(cache1.getCacheReadMode(), Matchers.equalTo(CacheReadMode.READWRITE)); SQLiteArtifactCache cache2 = (SQLiteArtifactCache) c2; assertThat(cache2.getCacheReadMode(), Matchers.equalTo(CacheReadMode.READWRITE)); SQLiteArtifactCache cache3 = (SQLiteArtifactCache) c3; assertThat(cache3.getCacheReadMode(), Matchers.equalTo(CacheReadMode.READONLY)); } @Test public void testCreateBoth() throws Exception { ArtifactCacheBuckConfig cacheConfig = ArtifactCacheBuckConfigTest.createFromText("[cache]", "mode = dir, http"); ProjectFilesystem projectFilesystem = new FakeProjectFilesystem(); BuckEventBus buckEventBus = BuckEventBusForTests.newInstance(); ArtifactCache artifactCache = new ArtifactCaches( cacheConfig, buckEventBus, projectFilesystem, Optional.empty(), MoreExecutors.newDirectExecutorService(), Optional.empty()) .newInstance(); assertThat(stripDecorators(artifactCache), Matchers.instanceOf(MultiArtifactCache.class)); } @Test public void testCreateDirCacheOnlyWhenOnBlacklistedWifi() throws Exception { ArtifactCacheBuckConfig cacheConfig = ArtifactCacheBuckConfigTest.createFromText( "[cache]", "mode = dir, http", "blacklisted_wifi_ssids = weevil, evilwifi"); ProjectFilesystem projectFilesystem = new FakeProjectFilesystem(); BuckEventBus buckEventBus = BuckEventBusForTests.newInstance(); ArtifactCache artifactCache = new ArtifactCaches( cacheConfig, buckEventBus, projectFilesystem, Optional.of("evilwifi"), MoreExecutors.newDirectExecutorService(), Optional.empty()) .newInstance(); assertThat(stripDecorators(artifactCache), Matchers.instanceOf(DirArtifactCache.class)); } private static ArtifactCache stripDecorators(ArtifactCache artifactCache) { if (artifactCache instanceof LoggingArtifactCacheDecorator) { LoggingArtifactCacheDecorator cacheDecorator = (LoggingArtifactCacheDecorator) artifactCache; return stripDecorators(cacheDecorator.getDelegate()); } if (artifactCache instanceof TwoLevelArtifactCacheDecorator) { 
TwoLevelArtifactCacheDecorator cacheDecorator = (TwoLevelArtifactCacheDecorator) artifactCache; return stripDecorators(cacheDecorator.getDelegate()); } if (artifactCache instanceof RetryingNetworkCache) { RetryingNetworkCache cacheDecorator = (RetryingNetworkCache) artifactCache; return stripDecorators(cacheDecorator.getDelegate()); } return artifactCache; } }
apache-2.0
robUx4/LittleProxy
src/test/java/org/littleshoot/proxy/EndToEndStoppingTest.java
6491
package org.littleshoot.proxy; import io.netty.handler.codec.http.HttpObject; import io.netty.handler.codec.http.HttpRequest; import org.apache.commons.io.IOUtils; import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpGet; import org.apache.http.util.EntityUtils; import org.junit.Test; import org.littleshoot.proxy.impl.DefaultHttpProxyServer; import org.openqa.selenium.Proxy; import org.openqa.selenium.WebDriver; import org.openqa.selenium.firefox.FirefoxDriver; import org.openqa.selenium.remote.CapabilityType; import org.openqa.selenium.remote.DesiredCapabilities; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.concurrent.TimeUnit; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; /** * End to end test making sure the proxy is able to service simple HTTP requests * and stop at the end. Made into a unit test from isopov and nasis's * contributions at: https://github.com/adamfisk/LittleProxy/issues/36 */ public class EndToEndStoppingTest { private final Logger log = LoggerFactory.getLogger(getClass()); /** * This is a quick test from nasis that exhibits different behavior from * unit tests because unit tests call System.exit(). The stop method should * stop all non-daemon threads and should cause the JVM to exit without * explicitly calling System.exit(), which running as an application * properly tests. 
*/ public static void main(final String[] args) throws Exception { int port = 9090; HttpProxyServer proxyServer = DefaultHttpProxyServer.bootstrap() .withPort(port) .start(); Proxy proxy = new Proxy(); proxy.setProxyType(Proxy.ProxyType.MANUAL); String proxyStr = String.format("localhost:%d", port); proxy.setHttpProxy(proxyStr); proxy.setSslProxy(proxyStr); DesiredCapabilities capability = DesiredCapabilities.firefox(); capability.setCapability(CapabilityType.PROXY, proxy); String urlString = "http://www.yahoo.com/"; WebDriver driver = new FirefoxDriver(capability); driver.manage().timeouts().pageLoadTimeout(30, TimeUnit.SECONDS); driver.get(urlString); driver.close(); System.out.println("Driver closed"); proxyServer.stop(); System.out.println("Proxy stopped"); } @Test public void testWithHttpClient() throws Exception { // final String url = "https://www.exceptional.io/api/errors?" + // "api_key="+"9848f38fb5ad1db0784675b75b9152c87dc1eb95"+"&protocol_version=6"; final String url = "https://www.exceptional.io"; final String[] sites = { url };// "https://www.google.com.ua"};//"https://exceptional.io"};//"http://www.google.com.ua"}; for (final String site : sites) { runSiteTestWithHttpClient(site); } } private void runSiteTestWithHttpClient(final String site) throws Exception { final int PROXY_PORT = 9097; final HttpClient client = TestUtils.createProxiedHttpClient(PROXY_PORT); // final HttpPost get = new HttpPost(site); final HttpGet get = new HttpGet(site); // HttpResponse response = client.execute(get); // assertEquals(200, response.getStatusLine().getStatusCode()); // EntityUtils.consume(response.getEntity()); /* * final HttpProxyServer ssl = new DefaultHttpProxyServer(PROXY_PORT, * null, null, new SslHandshakeHandlerFactory(), new HttpRequestFilter() * { * * @Override public void filter(HttpRequest httpRequest) { * System.out.println("Request went through proxy"); } }); */ final HttpProxyServer plain = DefaultHttpProxyServer.bootstrap() .withPort(PROXY_PORT) 
.withFiltersSource(new HttpFiltersSourceAdapter() { @Override public HttpFilters filterRequest(HttpRequest originalRequest) { return new HttpFiltersAdapter(originalRequest) { @Override public io.netty.handler.codec.http.HttpResponse proxyToServerRequest( HttpObject httpObject) { System.out .println("Request with through proxy"); return null; } }; } }).start(); final HttpProxyServer proxy = plain; // client.getParams().setParameter(ConnRoutePNames.DEFAULT_PROXY, // new HttpHost("75.101.134.244", PROXY_PORT)); // new HttpHost("localhost", PROXY_PORT, "https")); HttpResponse response = client.execute(get); assertEquals(200, response.getStatusLine().getStatusCode()); final HttpEntity entity = response.getEntity(); final String body = IOUtils.toString(entity.getContent()).toLowerCase(); EntityUtils.consume(entity); log.info("Consuming entity -- got body: {}", body); EntityUtils.consume(response.getEntity()); log.info("Stopping proxy"); proxy.stop(); } // @Test public void testWithWebDriver() throws Exception { int port = 9090; HttpProxyServer proxyServer = DefaultHttpProxyServer.bootstrap() .withPort(port) .start(); Proxy proxy = new Proxy(); proxy.setProxyType(Proxy.ProxyType.MANUAL); String proxyStr = String.format("localhost:%d", port); proxy.setHttpProxy(proxyStr); proxy.setSslProxy(proxyStr); DesiredCapabilities capability = DesiredCapabilities.firefox(); capability.setCapability(CapabilityType.PROXY, proxy); final String urlString = "http://www.yahoo.com/"; // Note this will actually launch a browser!! final WebDriver driver = new FirefoxDriver(capability); driver.manage().timeouts().pageLoadTimeout(30, TimeUnit.SECONDS); driver.get(urlString); final String source = driver.getPageSource(); // Just make sure it got something within reason. assertTrue(source.length() > 100); driver.close(); proxyServer.stop(); } }
apache-2.0
TU-Berlin-DIMA/oligos
src/main/java/de/tu_berlin/dima/oligos/type/util/Constraint.java
941
/******************************************************************************* * Copyright 2013 DIMA Research Group, TU Berlin (http://www.dima.tu-berlin.de) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package de.tu_berlin.dima.oligos.type.util; public enum Constraint { CHECK, UNIQUE, PRIMARY_KEY, FOREIGN_KEY, NOT_NULL, NONE }
apache-2.0
0359xiaodong/libgdx
extensions/gdx-bullet/jni/swig-src/collision/com/badlogic/gdx/physics/bullet/collision/SWIGTYPE_p_btAlignedObjectArrayT_btBroadphaseInterface_p_t.java
901
/* ---------------------------------------------------------------------------- * This file was automatically generated by SWIG (http://www.swig.org). * Version 3.0.0 * * Do not make changes to this file unless you know what you are doing--modify * the SWIG interface file instead. * ----------------------------------------------------------------------------- */ package com.badlogic.gdx.physics.bullet.collision; public class SWIGTYPE_p_btAlignedObjectArrayT_btBroadphaseInterface_p_t { private long swigCPtr; protected SWIGTYPE_p_btAlignedObjectArrayT_btBroadphaseInterface_p_t(long cPtr, boolean futureUse) { swigCPtr = cPtr; } protected SWIGTYPE_p_btAlignedObjectArrayT_btBroadphaseInterface_p_t() { swigCPtr = 0; } protected static long getCPtr(SWIGTYPE_p_btAlignedObjectArrayT_btBroadphaseInterface_p_t obj) { return (obj == null) ? 0 : obj.swigCPtr; } }
apache-2.0
r-ashish/myMessenger
app/src/main/java/com/ashish/msngr/ChatActivity.java
6472
/* * Copyright 2015 Ashish Ranjan * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.ashish.msngr; import android.app.Activity; import android.content.Intent; import android.os.AsyncTask; import android.os.Bundle; import android.util.Log; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.widget.EditText; import android.widget.TextView; import android.widget.Toast; import java.io.BufferedInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.HttpURLConnection; import java.net.MalformedURLException; import java.net.URL; import java.util.HashMap; import java.util.Iterator; import java.util.Map; public class ChatActivity extends Activity { String senderId; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); senderId = getIntent().getStringExtra("userId"); setTitle(senderId); setContentView(R.layout.activity_chat); } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.menu_chat, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle action bar item clicks here. The action bar will // automatically handle clicks on the Home/Up button, so long // as you specify a parent activity in AndroidManifest.xml. 
/*int id = item.getItemId(); //noinspection SimplifiableIfStatement if (id == R.id.action_settings) { return true; }*/ return super.onOptionsItemSelected(item); } AsyncTask<Void, Void, String> sendMessageTask; public void sendMessage(View view){ final EditText idBox = ((EditText)findViewById(R.id.userId)); final EditText msgBox = ((EditText)findViewById(R.id.msg)); final String userId = idBox.getText().toString(); final String msg = msgBox.getText().toString(); try{ sendMessageTask = new AsyncTask<Void, Void, String>() { @Override protected String doInBackground(Void... params) { String result = ""; Map<String, String> paramsMap = new HashMap<String, String>(); paramsMap.put("message", msg); paramsMap.put("userid",userId); paramsMap.put("fromid",senderId); try { URL serverUrl = null; try { serverUrl = new URL(Config.APP_SERVER_URL_MSG); } catch (MalformedURLException e) { Log.e("AppUtil", "URL Connection Error: " + Config.APP_SERVER_URL_MSG, e); result = "Invalid URL: " + Config.APP_SERVER_URL_MSG; } StringBuilder postBody = new StringBuilder(); Iterator<Map.Entry<String, String>> iterator = paramsMap.entrySet() .iterator(); while (iterator.hasNext()) { Map.Entry<String, String> param = iterator.next(); postBody.append(param.getKey()).append('=') .append(param.getValue()); if (iterator.hasNext()) { postBody.append('&'); } } String body = postBody.toString(); byte[] bytes = body.getBytes(); HttpURLConnection httpCon = null; try { httpCon = (HttpURLConnection) serverUrl.openConnection(); httpCon.setDoOutput(true); httpCon.setUseCaches(false); httpCon.setFixedLengthStreamingMode(bytes.length); httpCon.setRequestMethod("POST"); httpCon.setRequestProperty("Content-Type", "application/x-www-form-urlencoded;charset=UTF-8"); OutputStream out = httpCon.getOutputStream(); out.write(bytes); out.close(); InputStream is = new BufferedInputStream(httpCon.getInputStream()); String status = ShareExternalServer.readStream(is); result = "Message sent successfully!"; } finally { if 
(httpCon != null) { httpCon.disconnect(); } } } catch (IOException e) { result = "Look's like there's a problem with your Internet Connection.\nCheck your connection and try again!"; Log.e("AppUtil", "Error in sharing with App Server: " + e); } return result; } @Override protected void onPostExecute(String result) { sendMessageTask = null; Toast.makeText(getApplicationContext(), result, Toast.LENGTH_LONG).show(); idBox.setText(""); msgBox.setText(""); } }; if(userId.equals("")) Toast.makeText(this,"Please enter UserId!",Toast.LENGTH_LONG).show(); else if(msg.equals(""))Toast.makeText(this,"Message is empty!",Toast.LENGTH_LONG).show(); else sendMessageTask.execute(null,null,null);}catch(Exception e){Toast.makeText(this,e+" "+e.getMessage(),Toast.LENGTH_LONG).show();} } }
apache-2.0
googleapis/google-api-java-client-services
clients/google-api-services-gameservices/v1/1.31.0/com/google/api/services/gameservices/v1/model/SetIamPolicyRequest.java
3761
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.gameservices.v1.model; /** * Request message for `SetIamPolicy` method. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Game Services API. For a detailed explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class SetIamPolicyRequest extends com.google.api.client.json.GenericJson { /** * REQUIRED: The complete policy to be applied to the `resource`. The size of the policy is * limited to a few 10s of KB. An empty policy is a valid policy but certain Cloud Platform * services (such as Projects) might reject them. * The value may be {@code null}. */ @com.google.api.client.util.Key private Policy policy; /** * OPTIONAL: A FieldMask specifying which fields of the policy to modify. Only the fields in the * mask will be modified. If no mask is provided, the following default mask is used: `paths: * "bindings, etag"` * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private String updateMask; /** * REQUIRED: The complete policy to be applied to the `resource`. The size of the policy is * limited to a few 10s of KB. An empty policy is a valid policy but certain Cloud Platform * services (such as Projects) might reject them. * @return value or {@code null} for none */ public Policy getPolicy() { return policy; } /** * REQUIRED: The complete policy to be applied to the `resource`. The size of the policy is * limited to a few 10s of KB. An empty policy is a valid policy but certain Cloud Platform * services (such as Projects) might reject them. * @param policy policy or {@code null} for none */ public SetIamPolicyRequest setPolicy(Policy policy) { this.policy = policy; return this; } /** * OPTIONAL: A FieldMask specifying which fields of the policy to modify. Only the fields in the * mask will be modified. If no mask is provided, the following default mask is used: `paths: * "bindings, etag"` * @return value or {@code null} for none */ public String getUpdateMask() { return updateMask; } /** * OPTIONAL: A FieldMask specifying which fields of the policy to modify. Only the fields in the * mask will be modified. If no mask is provided, the following default mask is used: `paths: * "bindings, etag"` * @param updateMask updateMask or {@code null} for none */ public SetIamPolicyRequest setUpdateMask(String updateMask) { this.updateMask = updateMask; return this; } @Override public SetIamPolicyRequest set(String fieldName, Object value) { return (SetIamPolicyRequest) super.set(fieldName, value); } @Override public SetIamPolicyRequest clone() { return (SetIamPolicyRequest) super.clone(); } }
apache-2.0
tmack8001/logback-steno
src/test/java/com/arpnetworking/steno/LogValueMapFactoryTest.java
6857
/** * Copyright 2015 Groupon.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.arpnetworking.steno; import com.arpnetworking.logback.widgets.Widget; import org.apache.commons.lang3.SerializationUtils; import org.junit.Assert; import org.junit.Test; import java.lang.reflect.Constructor; import java.util.HashMap; import java.util.Map; /** * Tests for <code>LogValueMapFactory</code>. * * @author Ville Koskela (vkoskela at groupon dot com) */ public class LogValueMapFactoryTest { @Test public void testBeanIdentifierInjection() { final Widget w = new Widget("foo"); final Map<String, Object> expectedValue = new HashMap<>(); final LogValueMapFactory.LogValueMap logValueMap = LogValueMapFactory.builder(w).build(); final Map<String, Object> actualValue = logValueMap.getData(); Assert.assertEquals(expectedValue, actualValue); Assert.assertTrue(logValueMap.getTarget().isPresent()); Assert.assertSame(w, logValueMap.getTarget().get()); } @Test public void testOneKeyValuePair() { final Map<String, Object> expectedValue = new HashMap<>(); expectedValue.put("k1", "v1"); final Map<String, Object> actualValue = LogValueMapFactory.of("k1", "v1").getData(); Assert.assertEquals(expectedValue, actualValue); } @Test public void testTwoKeyValuePair() { final Map<String, Object> expectedValue = new HashMap<>(); expectedValue.put("k1", "v1"); expectedValue.put("k2", "v2"); final Map<String, Object> actualValue = LogValueMapFactory.of( "k1", "v1", "k2", "v2") .getData(); 
Assert.assertEquals(expectedValue, actualValue); } @Test public void testThreeKeyValuePair() { final Map<String, Object> expectedValue = new HashMap<>(); expectedValue.put("k1", "v1"); expectedValue.put("k2", "v2"); expectedValue.put("k3", "v3"); final Map<String, Object> actualValue = LogValueMapFactory.of( "k1", "v1", "k2", "v2", "k3", "v3") .getData(); Assert.assertEquals(expectedValue, actualValue); } @Test public void testFourKeyValuePair() { final Map<String, Object> expectedValue = new HashMap<>(); expectedValue.put("k1", "v1"); expectedValue.put("k2", "v2"); expectedValue.put("k3", "v3"); expectedValue.put("k4", "v4"); final Map<String, Object> actualValue = LogValueMapFactory.of( "k1", "v1", "k2", "v2", "k3", "v3", "k4", "v4") .getData(); Assert.assertEquals(expectedValue, actualValue); } @Test public void testFiveKeyValuePair() { final Map<String, Object> expectedValue = new HashMap<>(); expectedValue.put("k1", "v1"); expectedValue.put("k2", "v2"); expectedValue.put("k3", "v3"); expectedValue.put("k4", "v4"); expectedValue.put("k5", "v5"); final Map<String, Object> actualValue = LogValueMapFactory.of( "k1", "v1", "k2", "v2", "k3", "v3", "k4", "v4", "k5", "v5") .getData(); Assert.assertEquals(expectedValue, actualValue); } @Test public void testBuilderWithNullKey() { final Map<String, Object> expectedValue = new HashMap<>(); expectedValue.put("k1", "v1"); expectedValue.put("_nullKeys", true); expectedValue.put("k3", "v3"); final Map<String, Object> actualValue = LogValueMapFactory.builder() .put("k1", "v1") .put(null, "v2") .put("k3", "v3") .build() .getData(); Assert.assertEquals(expectedValue, actualValue); } @Test public void testBuilderWithNullValue() { final Map<String, Object> expectedValue = new HashMap<>(); expectedValue.put("k1", "v1"); expectedValue.put("_nullValues", true); expectedValue.put("k3", "v3"); final Map<String, Object> actualValue = LogValueMapFactory.builder() .put("k1", "v1") .put("k2", null) .put("k3", "v3") .build() .getData(); 
Assert.assertEquals(expectedValue, actualValue); } @Test public void testSerialization() { final Widget w = new Widget("foo"); final LogValueMapFactory.LogValueMap mapWithReference = LogValueMapFactory.builder(w).build(); Assert.assertTrue(mapWithReference.getTarget().isPresent()); Assert.assertSame(w, mapWithReference.getTarget().get()); final byte[] serializedMap = SerializationUtils.serialize(mapWithReference); final LogValueMapFactory.LogValueMap deserializedMap = SerializationUtils.deserialize(serializedMap); Assert.assertFalse(deserializedMap.getTarget().isPresent()); } @Test public void testToString() { final String asString = LogValueMapFactory.builder().build().toString(); Assert.assertNotNull(asString); Assert.assertFalse(asString.isEmpty()); final String asStringWithReference = LogValueMapFactory.builder(new Widget("foo")).build().toString(); Assert.assertNotNull(asStringWithReference); Assert.assertFalse(asStringWithReference.isEmpty()); Assert.assertTrue(asStringWithReference.contains("_id=")); Assert.assertTrue(asStringWithReference.contains("_class=com.arpnetworking.logback.widgets.Widget")); } @Test public void testPrivateConstructor() throws Exception { final Constructor<LogValueMapFactory> constructor = LogValueMapFactory.class.getDeclaredConstructor(); Assert.assertNotNull(constructor); try { constructor.newInstance(); Assert.fail("Static helper class should have private no-args constructor"); } catch (final IllegalAccessException e) { constructor.setAccessible(true); final LogValueMapFactory logValueMapFactory = constructor.newInstance(); Assert.assertNotNull(logValueMapFactory); } } }
apache-2.0
rexlin600/BasicPlatform
Server-Platform/Hibernate/3.0/memory-root/modules/memory-core/src/main/java/com/memory/platform/core/easyui/Tree.java
1338
package com.memory.platform.core.easyui; import java.util.List; /** * EasyUI tree模型 * * @author * */ public class Tree implements java.io.Serializable { private String id; private String text; private String state = "open";// open,closed private boolean checked = false; private Object attributes; private List<Tree> children; private String iconCls; private String pid; public String getId() { return id; } public void setId(String id) { this.id = id; } public String getText() { return text; } public void setText(String text) { this.text = text; } public String getState() { return state; } public void setState(String state) { this.state = state; } public boolean isChecked() { return checked; } public void setChecked(boolean checked) { this.checked = checked; } public Object getAttributes() { return attributes; } public void setAttributes(Object attributes) { this.attributes = attributes; } public List<Tree> getChildren() { return children; } public void setChildren(List<Tree> children) { this.children = children; } public String getIconCls() { return iconCls; } public void setIconCls(String iconCls) { this.iconCls = iconCls; } public String getPid() { return pid; } public void setPid(String pid) { this.pid = pid; } }
apache-2.0
Minoli/carbon-apimgt
components/apimgt/org.wso2.carbon.apimgt.rest.api.publisher/src/gen/java/org/wso2/carbon/apimgt/rest/api/publisher/ExportApi.java
3415
package org.wso2.carbon.apimgt.rest.api.publisher;

import io.swagger.annotations.ApiParam;

import org.wso2.carbon.apimgt.rest.api.publisher.dto.ErrorDTO;
import java.io.File;
import org.wso2.carbon.apimgt.rest.api.publisher.factories.ExportApiServiceFactory;
import org.wso2.msf4j.Microservice;
import org.wso2.msf4j.Request;
import org.wso2.msf4j.formparam.FileInfo;
import org.wso2.msf4j.formparam.FormDataParam;
import org.osgi.service.component.annotations.Component;

import java.io.InputStream;

import javax.ws.rs.ApplicationPath;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.FormParam;
import javax.ws.rs.GET;
import javax.ws.rs.HEAD;
import javax.ws.rs.OPTIONS;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;

/**
 * Swagger-generated microservice endpoint that exposes the publisher "export"
 * REST resource and forwards each call to the {@link ExportApiService}
 * implementation obtained from {@link ExportApiServiceFactory}.
 */
@Component(
    name = "org.wso2.carbon.apimgt.rest.api.publisher.ExportApi",
    service = Microservice.class,
    immediate = true
)
@Path("/api/am/publisher/v1.[\\d]+/export")
@Consumes({ "application/json" })
@Produces({ "application/json" })
@ApplicationPath("/export")
@io.swagger.annotations.Api(description = "the export API")
public class ExportApi implements Microservice  {
    /** Delegate that carries the actual export logic; this class is only the REST facade. */
    private final ExportApiService delegate = ExportApiServiceFactory.getExportApi();

    /**
     * GET /export/apis — exports information about the APIs matching the query.
     *
     * @param query   API search query (required).
     * @param limit   maximum number of results; defaults to 25 when absent.
     * @param offset  starting index within the result list; defaults to 0 when absent.
     * @param request the underlying MSF4J request.
     * @return the export archive response produced by the delegate.
     * @throws NotFoundException if the requested API does not exist.
     */
    @OPTIONS
    @GET
    @Path("/apis")
    @Consumes({ "application/json" })
    @Produces({ "application/zip" })
    @io.swagger.annotations.ApiOperation(value = "Export information related to an API.", notes = "This operation can be used to export information related to a particular API. ", response = File.class, authorizations = {
        @io.swagger.annotations.Authorization(value = "OAuth2Security", scopes = {
            @io.swagger.annotations.AuthorizationScope(scope = "apim:api_view", description = "View API")
        })
    }, tags={ "Export Configuration", })
    @io.swagger.annotations.ApiResponses(value = {
        @io.swagger.annotations.ApiResponse(code = 200, message = "OK. Export Configuration returned. ", response = File.class),
        @io.swagger.annotations.ApiResponse(code = 404, message = "Not Found. Requested API does not exist. ", response = File.class),
        @io.swagger.annotations.ApiResponse(code = 406, message = "Not Acceptable. The requested media type is not supported ", response = File.class),
        @io.swagger.annotations.ApiResponse(code = 412, message = "Precondition Failed. The request has not been performed because one of the preconditions is not met. ", response = File.class) })
    public Response exportApisGet(@ApiParam(value = "API search query ",required=true) @QueryParam("query") String query
,@ApiParam(value = "Maximum size of resource array to return. ", defaultValue="25") @DefaultValue("25") @QueryParam("limit") Integer limit
,@ApiParam(value = "Starting point within the complete list of items qualified. ", defaultValue="0") @DefaultValue("0") @QueryParam("offset") Integer offset
, @Context Request request)
    throws NotFoundException {
        // Defensive defaults for callers that bypass @DefaultValue injection.
        // Use int literals (autoboxed) instead of Integer.valueOf(String), which
        // needlessly parses a string constant at every call.
        limit = limit == null ? 25 : limit;
        offset = offset == null ? 0 : offset;
        return delegate.exportApisGet(query, limit, offset, request);
    }
}
apache-2.0
ServiceComb/java-chassis
foundations/foundation-protobuf/src/test/java/org/apache/servicecomb/foundation/protobuf/performance/TestProtoPerformance.java
4017
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.servicecomb.foundation.protobuf.performance;

import java.io.IOException;

import org.apache.servicecomb.foundation.protobuf.performance.cases.Empty;
import org.apache.servicecomb.foundation.protobuf.performance.cases.Map;
import org.apache.servicecomb.foundation.protobuf.performance.cases.Mixed;
import org.apache.servicecomb.foundation.protobuf.performance.cases.Pojo;
import org.apache.servicecomb.foundation.protobuf.performance.cases.PojoList;
import org.apache.servicecomb.foundation.protobuf.performance.cases.Scalars;
import org.apache.servicecomb.foundation.protobuf.performance.cases.SimpleList;

import com.google.common.base.Strings;

/**
 * Command-line benchmark driver: runs each model-shape case through every
 * serialization engine and prints a timing / payload-size comparison table
 * per case.
 */
public class TestProtoPerformance {
  public static void main(String[] args) throws IOException {
    // Explanatory notes printed ahead of the tables.
    System.out.println("1.protobuf\n"
        + " in our real scenes\n"
        + " business model never bind to transport, and can switch between different transports dynamically\n"
        + " that means if we choose standard protobuf, must build protobuf models from business models each time\n"
        + " so should be much slower than the test results");
    System.out.println("2.protoStuff\n"
        + " some scenes, there is no field but have getter or setter, so we can not use unsafe to access field\n"
        + " so we disable protoStuff unsafe feature\n\n"
        + " for repeated fields, protoStuff have better performance, but not compatible to protobuf\n");
    System.out.println("3.jackson\n"
        + " not support map/any/recursive, ignore related fields");
    System.out.println("4.serialize result size\n"
        + " ScbStrong/ScbWeak/Protobuf have the same and smaller size, because skip all default/null value");

    // Disable protostuff's sun.misc.Unsafe field access (see note 2 above).
    System.setProperty("protostuff.runtime.use_sun_misc_unsafe", "false");

    final int iterations = 500_000;
    printResult(new Empty().run(iterations));
    printResult(new Scalars().run(iterations));
    printResult(new Pojo().run(iterations));
    printResult(new SimpleList().run(iterations));
    printResult(new PojoList().run(iterations));
    printResult(new Map().run(iterations));
    printResult(new Mixed().run(iterations));
  }

  /**
   * Prints one fixed-width comparison table for a single benchmark case:
   * serialize/deserialize time, serialized length and round-trip length,
   * one column per engine.
   */
  private static void printResult(TestResult result) {
    final int engineCount = result.engineResults.size();
    final String headerFormat = Strings.repeat("%-11s", engineCount);
    final String valueFormat = Strings.repeat("%-11d", engineCount);

    // Materialize every printed column up front rather than inlining the
    // stream pipelines in each printf call.
    final Object[] engineNames = result.engineResults.stream().map(r -> r.engineName).toArray();
    final Object[] serMillis = result.engineResults.stream().map(r -> r.msSerTime).toArray();
    final Object[] serLengths = result.engineResults.stream().map(r -> r.serBytes.length).toArray();
    final Object[] deserMillis = result.engineResults.stream().map(r -> r.msDeserTime).toArray();
    final Object[] roundTripLengths = result.engineResults.stream().map(r -> r.deserResultBytes.length).toArray();
    final Object[] totalMillis = result.engineResults.stream().map(r -> r.msSerTime + r.msDeserTime).toArray();

    System.out.println(result.name + ": ");
    System.out.printf(" " + headerFormat + "\n", engineNames);
    System.out.printf("ser time(ms) : " + valueFormat + "\n", serMillis);
    System.out.printf("ser len : " + valueFormat + "\n", serLengths);
    System.out.printf("deser time(ms): " + valueFormat + "\n", deserMillis);
    System.out.printf("deser->ser len: " + valueFormat + "\n", roundTripLengths);
    System.out.printf("ser+deser(ms) : " + valueFormat + "\n\n", totalMillis);
  }
}
apache-2.0
dgestep/big-code-bang
src/main/resources/shared-util-templates/NameParser.java
11129
package ${topLevelDomain}.${companyName}.${productName}.model.util;

import org.apache.commons.lang3.StringUtils;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.TreeSet;

/**
 * Represents a person's name and provides functionality to break apart the name into its appropriate
 * attributes (TITLE, FIRST NAME, MIDDLE NAME, LAST NAME, SUFFIX).
 * <p>
 * This class provides two constructors. The first accepts the full name as a String argument and
 * breaks it into its respective attributes. After constructing, call the appropriate getter to
 * retrieve each attribute: {@link #getTitle()}, {@link #getFirstName()}, {@link #getMiddleName()},
 * {@link #getLastName()}, {@link #getSuffix()}.
 * <p>
 * Example: construct with "Mr. James Tiberious Kirk I" and this class will parse the string and
 * extract the title, the first name, the middle name, the last name, and the suffix.
 * <p>
 * The second constructor reverses the functionality of the first: it accepts the individual
 * attributes and {@link #getFullName()} returns the assembled name, e.g. "Mr. James Tiberious Kirk I".
 * <p>
 * This makes it simple to place one text box on a screen allowing the user to enter a full name and
 * to break apart the entered value into its respective pieces to be stored into database columns etc.
 *
 * @author ${codeAuthor}.
 */
public class NameParser implements Serializable {

    private static final long serialVersionUID = -5038756645781462887L;

    private String nameTitle;
    private String firstName;
    private String middleName;
    private String lastName;
    private String nameSuffix;

    /**
     * Working list of name parts. Only populated by the full-name constructor; the parsing
     * helpers consume (remove from) it as each attribute is extracted, so the order in which
     * the set*() helpers run matters.
     */
    private List<String> tokens;

    private static final String BLANK = " ";

    private static final String[] TITLES = { "DR.", "DR", "MR.", "MR", "MRS.", "MRS", "MS.", "MS", "MISS", "SIR" };

    private static final String[] SUFFIX = { "JR.", "JR", "SR", "SR.", "I", "II", "III", "IV", "V" };

    private static final String[] SUR_NAME_PREFIX = { "VAN", "VON", "DE", "BIN" };

    private static Set<String> nameTitles = new TreeSet<String>();

    private static Set<String> nameSuffixes = new TreeSet<String>();

    private static Set<String> surnamePrefix = new TreeSet<String>();

    static {
        nameTitles.addAll(Arrays.asList(TITLES));
        nameSuffixes.addAll(Arrays.asList(SUFFIX));
        surnamePrefix.addAll(Arrays.asList(SUR_NAME_PREFIX));
    }

    /**
     * Constructs this class and forms the full name from the attributes passed to this constructor.
     *
     * @param nameTitle the persons title.
     * @param firstName the persons first name.
     * @param middleName the persons middle name.
     * @param lastName the persons last name.
     * @param nameSuffix any suffix the person goes by. Example: Jr. Sr. etc.
     */
    @SuppressWarnings("PMD.ExcessiveParameterList")
    public NameParser(final String nameTitle, final String firstName, final String middleName,
            final String lastName, final String nameSuffix) {
        this.nameTitle = nameTitle;
        this.firstName = firstName;
        this.middleName = middleName;
        this.lastName = lastName;
        this.nameSuffix = nameSuffix;
    }

    /**
     * Constructs this class and breaks apart the full name into its respective attributes. Call the
     * appropriate getter methods to get each attribute of the full name.
     *
     * @param fullName the persons full name. Example: "James Tiberious Kirk".
     */
    public NameParser(final String fullName) {
        tokens = getTokens(fullName);
        final int size = tokens.size();
        if (size == 0) {
            return;
        }
        if (size == 1) {
            // A single token is either a bare title or a bare first name.
            setTitle();
            if (tokens.size() == 1) {
                firstName = tokens.get(0);
            }
            return;
        }

        // Order here matters: each helper removes the tokens it consumes, so the
        // title and suffix are stripped off the ends before first/last/middle
        // names are extracted from what remains.
        setTitle();
        setSuffix();
        setFirstName();
        setLastName();
        setMiddleName();
    }

    /**
     * Returns the individual elements of the supplied full name.
     *
     * @param fullName the persons full name. Example: "James Tiberious Kirk".
     * @return the elements in a list. Example: element 0 = "James", element 1 = "Tiberious",
     *         element 2 = "Kirk".
     */
    private List<String> getTokens(final String fullName) {
        // Named distinctly from the "tokens" field to avoid shadowing it.
        final List<String> parts = new ArrayList<>();
        if (StringUtils.isEmpty(fullName)) {
            return parts;
        }
        final StringTokenizer tokenizer = new StringTokenizer(fullName, BLANK);
        while (tokenizer.hasMoreTokens()) {
            parts.add(tokenizer.nextToken().trim());
        }
        return parts;
    }

    /**
     * Extracts the persons title from the tokens and sets the title as a property of this class if
     * the supplied title is a known title to this class.
     */
    private void setTitle() {
        final String title = StringUtils.trim(tokens.get(0));
        if (nameTitles.contains(title.toUpperCase(Locale.US))) {
            nameTitle = title;
            tokens.remove(0);
        }
    }

    /**
     * Extracts the persons suffix from the tokens and sets the suffix as a property of this class if
     * the supplied suffix is a known suffix to this class.
     */
    private void setSuffix() {
        final int index = tokens.size() - 1;
        final String suffix = StringUtils.trim(tokens.get(index));
        if (nameSuffixes.contains(suffix.toUpperCase(Locale.US))) {
            nameSuffix = suffix;
            tokens.remove(index);
        }
    }

    /**
     * Extracts the persons first name from the tokens and sets the first name as a property of this
     * class.
     */
    private void setFirstName() {
        firstName = StringUtils.trim(tokens.get(0));
        tokens.remove(0);
    }

    /**
     * Extracts the persons last name from the tokens and sets the last name as a property of this
     * class. When a surname prefix (e.g. "Van", "De") is present, the prefix and everything after
     * it become part of the last name.
     */
    private void setLastName() {
        final int index = getTokenIndexForSurname();
        if (index < 0) {
            // No surname prefix: the last remaining token is the last name.
            final int z = tokens.size() - 1;
            lastName = StringUtils.trim(tokens.get(z));
            tokens.remove(z);
            return;
        }

        // Include the surname prefix as part of the last name. Removing at the
        // fixed position "index" shifts the remaining elements left, so reading
        // and removing at "index" each iteration walks the whole tail.
        final StringBuilder buf = new StringBuilder();
        for (int x = index, y = tokens.size(); x < y; x++) {
            final String name = StringUtils.trim(tokens.get(index));
            buf.append(name).append(BLANK);
            tokens.remove(index);
        }
        lastName = buf.toString().trim();
    }

    /**
     * Returns index within the tokens array where the surname is located.
     *
     * @return the index or -1 if not found.
     */
    private int getTokenIndexForSurname() {
        int index = -1;
        for (final String prefix : surnamePrefix) {
            index = indexOf(prefix);
            if (index > -1) {
                break;
            }
        }
        return index;
    }

    /**
     * Returns the index within the tokens array where the supplied value is located
     * (case-insensitive).
     *
     * @param val the value to look for.
     * @return the index or -1 if not found.
     */
    private int indexOf(final String val) {
        // Do not reassign the parameter; normalize into a local instead.
        final String target = val.toUpperCase(Locale.US);
        for (int index = 0, n = tokens.size(); index < n; index++) {
            if (tokens.get(index).equalsIgnoreCase(target)) {
                return index;
            }
        }
        return -1;
    }

    /**
     * Extracts the persons middle name from the tokens and sets the middle name as a property of
     * this class. Everything left after title/suffix/first/last extraction is the middle name.
     */
    private void setMiddleName() {
        if (tokens.isEmpty()) {
            return;
        }
        final StringBuilder buf = new StringBuilder();
        for (int x = 0, y = tokens.size(); x < y; x++) {
            final String name = StringUtils.trim(tokens.get(x));
            buf.append(name).append(BLANK);
        }
        middleName = buf.toString().trim();
    }

    /**
     * Returns the full name. If any attribute is null, it is converted to empty string.
     *
     * @return the full name or null.
     */
    public String getFullName() {
        final StringBuilder buf = new StringBuilder(64);
        if (StringUtils.isNotEmpty(nameTitle)) {
            buf.append(StringUtils.defaultString(nameTitle)).append(BLANK);
        }
        buf.append(StringUtils.defaultString(firstName)).append(BLANK);
        if (StringUtils.isNotEmpty(middleName)) {
            buf.append(StringUtils.defaultString(middleName)).append(BLANK);
        }
        if (StringUtils.isNotEmpty(lastName)) {
            buf.append(StringUtils.defaultString(lastName)).append(BLANK);
        }
        if (StringUtils.isNotEmpty(nameSuffix)) {
            buf.append(StringUtils.defaultString(nameSuffix));
        }
        return buf.toString().trim();
    }

    /**
     * Returns the title.
     *
     * @return the title (never null; empty string when absent).
     */
    public String getTitle() {
        return StringUtils.defaultString(nameTitle).trim();
    }

    /**
     * Returns the first name.
     *
     * @return the first name (never null; empty string when absent).
     */
    public String getFirstName() {
        return StringUtils.defaultString(firstName).trim();
    }

    /**
     * Returns the middle name.
     *
     * @return the middle name (never null; empty string when absent).
     */
    public String getMiddleName() {
        return StringUtils.defaultString(middleName).trim();
    }

    /**
     * Returns the last name.
     *
     * @return the last name (never null; empty string when absent).
     */
    public String getLastName() {
        return StringUtils.defaultString(lastName).trim();
    }

    /**
     * Returns the suffix.
     *
     * @return the suffix (never null; empty string when absent).
     */
    public String getSuffix() {
        return StringUtils.defaultString(nameSuffix).trim();
    }

    @Override
    public String toString() {
        return getFullName();
    }

    /**
     * Returns a Set containing all the possible TITLES for a name.
     * NOTE(review): the returned set is the live static set — callers can mutate it; confirm
     * whether that is intended before changing to an unmodifiable view.
     *
     * @return the known titles.
     */
    public static Set<String> getTitles() {
        return nameTitles;
    }

    /**
     * Returns a Set containing all the possible suffixes for a name.
     * NOTE(review): returns the live static set (mutable by callers).
     *
     * @return the known suffixes.
     */
    public static Set<String> getSuffixes() {
        return nameSuffixes;
    }

    /**
     * Returns a Set containing all the possible surname prefixes for a name.
     * NOTE(review): returns the live static set (mutable by callers).
     *
     * @return the known surnames.
     */
    public static Set<String> getSurnamePrefixes() {
        return surnamePrefix;
    }
}
apache-2.0
kuujo/copycat
core/src/main/java/io/atomix/core/multiset/impl/BlockingDistributedMultiset.java
5468
/*
 * Copyright 2016-present Open Networking Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.atomix.core.multiset.impl;

import io.atomix.core.collection.CollectionEventListener;
import io.atomix.core.iterator.SyncIterator;
import io.atomix.core.iterator.impl.BlockingIterator;
import io.atomix.core.multiset.AsyncDistributedMultiset;
import io.atomix.core.multiset.DistributedMultiset;
import io.atomix.core.set.DistributedSet;
import io.atomix.core.set.impl.BlockingDistributedSet;
import io.atomix.primitive.PrimitiveException;
import io.atomix.primitive.Synchronous;

import javax.annotation.Nullable;
import java.util.Arrays;
import java.util.Collection;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

/**
 * Implementation of {@link DistributedMultiset} that merely delegates to a {@link AsyncDistributedMultiset}
 * and waits for the operation to complete.
 *
 * @param <E> set element type
 */
public class BlockingDistributedMultiset<E> extends Synchronous<AsyncDistributedMultiset<E>> implements DistributedMultiset<E> {

  /** Maximum time to block for each delegated asynchronous operation. */
  private final long operationTimeoutMillis;
  /** The asynchronous multiset every call is delegated to. */
  private final AsyncDistributedMultiset<E> asyncSet;

  /**
   * Creates a blocking view over the given asynchronous multiset.
   *
   * @param asyncSet the asynchronous multiset to delegate to
   * @param operationTimeoutMillis per-operation timeout in milliseconds
   */
  public BlockingDistributedMultiset(AsyncDistributedMultiset<E> asyncSet, long operationTimeoutMillis) {
    super(asyncSet);
    this.asyncSet = asyncSet;
    this.operationTimeoutMillis = operationTimeoutMillis;
  }

  @Override
  public int size() {
    return complete(asyncSet.size());
  }

  @Override
  public boolean isEmpty() {
    return complete(asyncSet.isEmpty());
  }

  @SuppressWarnings("unchecked")
  @Override
  public boolean contains(Object o) {
    return complete(asyncSet.contains((E) o));
  }

  @Override
  public boolean add(E e) {
    return complete(asyncSet.add(e));
  }

  @SuppressWarnings("unchecked")
  @Override
  public boolean remove(Object o) {
    return complete(asyncSet.remove((E) o));
  }

  @SuppressWarnings("unchecked")
  @Override
  public boolean containsAll(Collection<?> c) {
    return complete(asyncSet.containsAll((Collection<? extends E>) c));
  }

  @Override
  public boolean addAll(Collection<? extends E> c) {
    return complete(asyncSet.addAll(c));
  }

  @SuppressWarnings("unchecked")
  @Override
  public boolean retainAll(Collection<?> c) {
    return complete(asyncSet.retainAll((Collection<? extends E>) c));
  }

  @SuppressWarnings("unchecked")
  @Override
  public boolean removeAll(Collection<?> c) {
    return complete(asyncSet.removeAll((Collection<? extends E>) c));
  }

  @Override
  public int count(@Nullable Object element) {
    return complete(asyncSet.count(element));
  }

  @Override
  public int add(@Nullable E element, int occurrences) {
    return complete(asyncSet.add(element, occurrences));
  }

  @Override
  public int remove(@Nullable Object element, int occurrences) {
    return complete(asyncSet.remove(element, occurrences));
  }

  @Override
  public int setCount(E element, int count) {
    return complete(asyncSet.setCount(element, count));
  }

  @Override
  public boolean setCount(E element, int oldCount, int newCount) {
    return complete(asyncSet.setCount(element, oldCount, newCount));
  }

  @Override
  public DistributedSet<E> elementSet() {
    return new BlockingDistributedSet<>(asyncSet.elementSet(), operationTimeoutMillis);
  }

  @Override
  public DistributedSet<Entry<E>> entrySet() {
    return new BlockingDistributedSet<>(async().entrySet(), operationTimeoutMillis);
  }

  @Override
  public void clear() {
    complete(asyncSet.clear());
  }

  @Override
  public SyncIterator<E> iterator() {
    return new BlockingIterator<>(asyncSet.iterator(), operationTimeoutMillis);
  }

  @Override
  public Object[] toArray() {
    return stream().toArray();
  }

  @Override
  @SuppressWarnings("unchecked")
  public <T> T[] toArray(T[] array) {
    Object[] copy = toArray();
    if (array.length < copy.length) {
      // Collection.toArray(T[]) contract: when the supplied array is too small,
      // allocate a new array of the caller's runtime component type instead of
      // silently truncating the result (the previous behavior).
      return (T[]) Arrays.copyOf(copy, copy.length, array.getClass());
    }
    System.arraycopy(copy, 0, array, 0, copy.length);
    if (array.length > copy.length) {
      // Contract: null-terminate when the destination has room to spare.
      array[copy.length] = null;
    }
    return array;
  }

  @Override
  public void addListener(CollectionEventListener<E> listener) {
    complete(asyncSet.addListener(listener));
  }

  @Override
  public void removeListener(CollectionEventListener<E> listener) {
    complete(asyncSet.removeListener(listener));
  }

  @Override
  public AsyncDistributedMultiset<E> async() {
    return asyncSet;
  }

  /**
   * Blocks for the given future up to the configured timeout, translating
   * failures into {@link PrimitiveException} subtypes.
   */
  private <T> T complete(CompletableFuture<T> future) {
    try {
      return future.get(operationTimeoutMillis, TimeUnit.MILLISECONDS);
    } catch (InterruptedException e) {
      // Restore the interrupt flag before surfacing the failure.
      Thread.currentThread().interrupt();
      throw new PrimitiveException.Interrupted();
    } catch (TimeoutException e) {
      throw new PrimitiveException.Timeout();
    } catch (ExecutionException e) {
      if (e.getCause() instanceof PrimitiveException) {
        throw (PrimitiveException) e.getCause();
      } else {
        throw new PrimitiveException(e.getCause());
      }
    }
  }
}
apache-2.0
mdrillin/teiid-webui
teiid-webui-webapp/src/main/java/org/teiid/webui/client/widgets/vieweditor/SelectTableColumnsPage.java
6205
/*
 * Copyright 2014 JBoss Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.teiid.webui.client.widgets.vieweditor;

import java.util.List;

import javax.annotation.PostConstruct;
import javax.enterprise.context.Dependent;
import javax.enterprise.event.Event;
import javax.enterprise.event.Observes;
import javax.inject.Inject;

import org.jboss.errai.ui.shared.api.annotations.DataField;
import org.jboss.errai.ui.shared.api.annotations.Templated;
import org.teiid.webui.client.dialogs.UiEvent;
import org.teiid.webui.client.dialogs.UiEventType;
import org.teiid.webui.client.messages.ClientMessages;
import org.teiid.webui.client.widgets.CheckableNameTypeRow;
import org.teiid.webui.client.widgets.table.ColumnNamesTable;
import org.teiid.webui.share.Constants;

import com.google.gwt.user.client.Window;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.Label;

@Dependent
@Templated("./SelectTableColumnsPage.html")
/**
 * ViewEditor wizard page for selection of a single table's columns.
 * <p>
 * Errai-templated widget: the @DataField field names below are bound to elements in
 * SelectTableColumnsPage.html, so they must not be renamed independently of the template.
 */
public class SelectTableColumnsPage extends Composite {

    /** Client-side i18n message lookup. */
    @Inject
    private ClientMessages i18n;

    /** CDI event fired to push the generated DDL back to the view definition editor. */
    @Inject
    Event<UiEvent> setDdlEvent;

    // Template-bound widgets (ids match elements in SelectTableColumnsPage.html).
    @Inject @DataField("lbl-table-columns-message")
    protected Label messageLabel;
    @Inject @DataField("lbl-table-title")
    protected Label tableTitleLabel;
    @Inject @DataField("tbl-table-columns")
    protected ColumnNamesTable columnsTable;

    /** Owning wizard panel; set by the wizard via setWizard() before update() is called. */
    private ViewEditorWizardPanel wizard;

    // Shared singleton holding the editor state; index 0 is used throughout because
    // this page edits exactly one table.
    private ViewEditorManager editorManager = ViewEditorManager.getInstance();

    /** Current validation status; Constants.OK when the page is complete. */
    private String currentStatus = Constants.BLANK;

    // Cached i18n strings resolved once in postConstruct().
    private String msgCheckOneOrMoreColumns;
    private String msgClickApplyWhenFinished;

    /**
     * Called after construction.
     * Resolves i18n messages, registers this class as the owner of the columns table
     * (so its events can be filtered in onUiEvent), and sets the initial prompt text.
     */
    @PostConstruct
    protected void postConstruct() {
        // Owner string lets onUiEvent() ignore events raised by other ColumnNamesTable instances.
        columnsTable.setOwner(this.getClass().getName());
        msgCheckOneOrMoreColumns = i18n.format("select-table-columns-page.check-one-or-more-columns.message");
        msgClickApplyWhenFinished = i18n.format("select-table-columns-page.click-apply-when-finished.message");
        messageLabel.setText(msgCheckOneOrMoreColumns);
        // Tooltips
        columnsTable.setTitle(i18n.format("select-table-columns-page.columnsTable.tooltip"));
    }

    /**
     * Refresh the panel using state from the ViewEditorManager:
     * page title, selected table name and that table's column rows.
     */
    public void update() {
        String tableName = editorManager.getTable(0);
        this.wizard.setWizardPageTitle(i18n.format("select-table-columns-page.title"));
        tableTitleLabel.setText(tableName);
        List<CheckableNameTypeRow> allColumns = editorManager.getColumns(0);
        if (allColumns != null) {
            columnsTable.setData(allColumns);
        }
        updateStatus();
    }

    /**
     * Set the owner wizardPanel.
     *
     * @param wizard the wizard
     */
    public void setWizard(ViewEditorWizardPanel wizard) {
        this.wizard = wizard;
    }

    /**
     * Handles UiEvents from columnNamesTable.
     * Observes all CDI UiEvents; reacts only to checkbox changes raised by this
     * page's own table (matched via the owner string set in postConstruct()).
     *
     * @param dEvent the observed event
     */
    public void onUiEvent(@Observes UiEvent dEvent) {
        // checkbox change event from column names table
        if (dEvent.getType() == UiEventType.COLUMN_NAME_TABLE_CHECKBOX_CHANGED
                && dEvent.getEventSource().equals(this.getClass().getName())) {
            List<String> selectedColumns = columnsTable.getSelectedColumnNames();
            List<String> selectedColumnTypes = columnsTable.getSelectedColumnTypes();
            // Mirror the table selection into the shared editor state before re-validating.
            editorManager.setSelectedColumns(0, selectedColumns);
            editorManager.setSelectedColumnTypes(0, selectedColumnTypes);
            updateStatus();
        }
    }

    /**
     * Update panel status: the page is valid only when at least one column is
     * selected. Enables/disables the wizard's next/replace button accordingly.
     */
    private void updateStatus() {
        currentStatus = Constants.OK;

        // Ensure some columns are selected
        if (Constants.OK.equals(currentStatus)) {
            List<String> selectedColumns = editorManager.getSelectedColumns(0);
            int nCols = (selectedColumns == null) ? 0 : selectedColumns.size();
            if (nCols == 0) {
                currentStatus = msgCheckOneOrMoreColumns;
            }
        }

        // Enable setDdlButton button if OK
        if (Constants.OK.equals(currentStatus)) {
            messageLabel.setText(msgClickApplyWhenFinished);
            this.wizard.setNextOrReplaceButton(true);
        } else {
            messageLabel.setText(currentStatus);
            this.wizard.setNextOrReplaceButton(false);
        }
    }

    /**
     * Handles when the user clicks the Replace DDL button.
     * Pushes the current selection into the editor state, builds the view DDL and
     * fires an event carrying the DDL and its sources to the view definition editor.
     */
    public void replaceDdlClicked() {
        List<String> colNames = columnsTable.getSelectedColumnNames();
        List<String> colTypes = columnsTable.getSelectedColumnTypes();
        if (colNames.isEmpty()) {
            // NOTE(review): alert text is hard-coded while other messages use i18n —
            // confirm whether this should be localized.
            Window.alert("Please select one or more columns");
            return;
        }
        // Set the selected columns before generating ddl
        editorManager.setSelectedColumns(0, colNames);
        editorManager.setSelectedColumnTypes(0, colTypes);
        // Builds a new View DDL based on selections and moves it to the View Defn area.
        String viewDdl = editorManager.buildViewDdl();
        List<String> sources = editorManager.getSources();
        UiEvent uiEvent = new UiEvent(UiEventType.VIEW_DEFN_REPLACE_FROM_SSOURCE_EDITOR);
        uiEvent.setViewDdl(viewDdl);
        uiEvent.setViewSources(sources);
        setDdlEvent.fire(uiEvent);
    }

    /**
     * Get the List of selected Column names.
     *
     * @return the column names
     */
    public List<String> getSelectedColumnNames() {
        return columnsTable.getSelectedColumnNames();
    }

    /**
     * Get the List of selected Column types.
     *
     * @return the column types
     */
    public List<String> getSelectedColumnTypes() {
        return columnsTable.getSelectedColumnTypes();
    }
}
apache-2.0
vthangathurai/SOA-Runtime
codegen/codegen-tools/src/main/java/org/ebayopensource/turmeric/tools/codegen/validator/MessageObject.java
2313
/******************************************************************************* * Copyright (c) 2006-2010 eBay Inc. All Rights Reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 *******************************************************************************/ package org.ebayopensource.turmeric.tools.codegen.validator; /** * Holder class for keeping validation error information. * * * @author rmandapati */ public class MessageObject { private String m_methodName; private String m_message; private String m_resolution; private boolean m_isFatalError; public MessageObject() {} public MessageObject(String message) { m_message = message; } public MessageObject(String message, boolean isFatal) { m_message = message; m_isFatalError = isFatal; } public MessageObject( String methodName, String message) { m_methodName = methodName; m_message = message; } public MessageObject( String methodName, String message, String resolution) { m_methodName = methodName; m_message = message; m_resolution = resolution; } public String getMessage() { return m_message; } public void setMessage(String message) { this.m_message = message; } public String getMethodName() { return m_methodName; } public void setMethodName(String name) { m_methodName = name; } public String getResolution() { return m_resolution; } public void setResolution(String resolution) { this.m_resolution = resolution; } public boolean isFatalError() { return m_isFatalError; } public void setIsFatalError(boolean fatalError) { m_isFatalError = fatalError; } public String toString() { StringBuilder strBuilder = new StringBuilder(); if (getMethodName() != null) { strBuilder.append("Method Name : ").append(getMethodName()).append("\n"); } if (getMessage() != null) { strBuilder.append("Error Message : ").append(getMessage()); } if (getResolution() != 
null) { strBuilder.append("\nHow to fix : \n").append(getResolution()); } return strBuilder.toString(); } }
apache-2.0
entityresolution/Entity_Resolution_Service_Intermediary_OSGi
src/main/java/gov/nij/bundles/intermediaries/ers/osgi/EntityResolutionService.java
16987
/* * Copyright 2013 SEARCH Group, Incorporated. * * See the NOTICE file distributed with this work for additional information * regarding copyright ownership. SEARCH Group Inc. licenses this file to You * under the Apache License, Version 2.0 (the "License"); you may not use this * file except in compliance with the License. You may obtain a copy of the * License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package gov.nij.bundles.intermediaries.ers.osgi; import gov.nij.er.ExternallyIdentifiableRecordFactory; import gov.nij.er.StringDistanceScoreMatcher; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import serf.data.Attribute; import serf.data.BasicMatcherMerger; import serf.data.ExistentialBooleanComparator; import serf.data.Record; import serf.deduplication.RSwoosh; /** * An OSGi service that provides the ability to resolve "entities". The interface closely follows the Stanford SERF toolkit. * */ public class EntityResolutionService { private static final Log LOG = LogFactory.getLog(EntityResolutionService.class); /** * Resolve a set of entities. 
* * @param records * The input records, in an enhanced SERF structure * @param attributeParameters * The information needed to control the resolution (algorithm, merge threshold, determinativeness) * @return A set of records with merged entities and a set of statistics for the merge * @throws Exception */ public EntityResolutionResults resolveEntities(List<RecordWrapper> recordWrappers, Set<AttributeParameters> attributeParameters) throws Exception { return resolveEntities(recordWrappers, attributeParameters, Integer.MAX_VALUE); } /** * Resolve a set of entities. * * @param records * The input records, in an enhanced SERF structure * @param attributeParameters * The information needed to control the resolution (algorithm, merge threshold, determinativeness) * @param recordLimit * If the number of input records is greater than this, entity resolution will not be performed * @return A set of records with merged entities and a set of statistics for the merge * @throws Exception */ public EntityResolutionResults resolveEntities(List<RecordWrapper> recordWrappers, Set<AttributeParameters> attributeParameters, int recordLimit) throws Exception { verifyProperSortConfig(attributeParameters); List<ExternallyIdentifiableRecord> returnRecordList = null; boolean recordLimitExceeded = false; if (recordWrappers.size() <= recordLimit) { ERSMatcherMerger matcherMerger = new ERSMatcherMerger(); matcherMerger.init(attributeParameters); LOG.debug("In resolveEntities, recordWrappers=" + recordWrappers); Set<Record> inputRecords = new HashSet<Record>(); List<ExternallyIdentifiableRecord> records = EntityResolutionConversionUtils.convertRecordWrappers(recordWrappers); inputRecords.addAll(records); Set<Record> rSwooshMerged = RSwoosh.execute(matcherMerger, inputRecords); LOG.debug("In resolveEntities, merged records from RSwoosh=" + rSwooshMerged); Set<ExternallyIdentifiableRecord> returnRecords = new HashSet<ExternallyIdentifiableRecord>(); for (Record r : rSwooshMerged) { if (!(r 
instanceof ExternallyIdentifiableRecord)) { throw new IllegalStateException("Somehow a type of Record other than an ExternallyIdentifiableRecord got into the merge results, type=" + r.getClass().getName()); } returnRecords.add((ExternallyIdentifiableRecord) r); } returnRecordList = new ArrayList<ExternallyIdentifiableRecord>(); returnRecordList.addAll(returnRecords); } else { returnRecordList = EntityResolutionConversionUtils.convertRecordWrappers(recordWrappers); recordLimitExceeded = true; } Collections.sort(returnRecordList, new RecordComparator(attributeParameters)); LOG.debug("In resolveEntities, returnRecords combining DF and RSwoosh=" + returnRecordList); Map<String, Set<AttributeStatistics>> statistics = computeStatistics(returnRecordList, attributeParameters); EntityResolutionResults ret = new EntityResolutionResults(EntityResolutionConversionUtils.convertRecords(returnRecordList), statistics, recordLimitExceeded); return ret; } private void verifyProperSortConfig(Set<AttributeParameters> attributeParameters) { Set<Integer> ranks = new HashSet<Integer>(); for (AttributeParameters ap : attributeParameters) { SortOrderSpecification sos = ap.getSortOrder(); if (sos != null) { Integer rank = sos.getSortOrderRank(); if (ranks.contains(rank)) { throw new IllegalStateException("Duplicate sort rank of " + rank + " configured."); } ranks.add(rank); } } } private Map<String, Set<AttributeStatistics>> computeStatistics(List<ExternallyIdentifiableRecord> records, Set<AttributeParameters> attributeParameters) { // the reason we do this after the fact, rather than computing the // statistics as we go, is to leave the RSwoosh implementation // from SERF as intact as possible. Since it does not compute stats, we // do so in a second pass after the fact. 
Map<String, Set<AttributeStatistics>> ret = new HashMap<String, Set<AttributeStatistics>>(); for (ExternallyIdentifiableRecord record : records) { Set<AttributeStatistics> statSet = new HashSet<AttributeStatistics>(); ret.put(record.getExternalId(), statSet); for (AttributeParameters ap : attributeParameters) { double mean = computeMean(record, ap); double sd = computeStandardDeviation(record, ap); AttributeStatistics stats = new AttributeStatistics(ap.getAttributeName()); // note: for the initial pilot, we are returning zero for these // values, because we need to re-assess what these // metrics mean and do not have time to do so prior to the first // federated query demo. consider this sample case. // You have a record that is a merge of 10 original records, and // because 9 of them had identical values for an // attribute, there are only two attribute values in the // resultant merged record. if we compute the mean as the // mean of the pairwise distance values, we would get a very // skewed measure, since it would be one distance divided by // two, when in fact 10 original records were involved. So we // either need to change RSwoosh to keep track of stats // as we go, or else come up with some other "quality" measure // that accounts for attribute value merging. 
stats.setAverageStringDistance(mean); stats.setStandardDeviationStringDistance(sd); statSet.add(stats); } } return ret; } private double computeStandardDeviation(ExternallyIdentifiableRecord record, AttributeParameters attributeParameters) { return 0; } private double computeMean(ExternallyIdentifiableRecord record, AttributeParameters attributeParameters) { return 0; } private static final class ERSMatcherMerger extends BasicMatcherMerger { private Map<String, ExistentialBooleanComparator> comparatorMap = new HashMap<String, ExistentialBooleanComparator>(); private Set<AttributeParameters> attributeParameters; public ERSMatcherMerger() { super._factory = new ExternallyIdentifiableRecordFactory(); } public void init(Set<AttributeParameters> attributeParameters) throws Exception { this.attributeParameters = attributeParameters; LOG.info("Initializing ERSMatcherMerger with parameters " + attributeParameters); for (AttributeParameters ap : attributeParameters) { if (ap == null || ap.getAttributeName() == null || ap.getAlgorithmClassName() == null) { throw new IllegalArgumentException("AttributeParameters object has a null object."); } StringDistanceScoreMatcher matcher = new StringDistanceScoreMatcher(ap.getAlgorithmClassName()); matcher.init(ap.getThreshold()); comparatorMap.put(ap.getAttributeName(), new ExistentialBooleanComparator(matcher)); } } private static final int MATCH = 1; private static final int NO_MATCH = 2; private static final int MATCH_INDETERMINATE = 3; protected boolean matchInternal(Record r1, Record r2) { Map<String, Attribute> r1attr = r1.getAttributes(); Map<String, Attribute> r2attr = r2.getAttributes(); if (!haveSameAttributes(r1attr, r2attr)) { return false; } int deterministicMatch = matchDeterministicAttributes(r1, r2); if (deterministicMatch == MATCH_INDETERMINATE) { LOG.debug("Indeterminate result from deterministic evaluation"); boolean nonDeterministicAttributeExists = false; for (String s1 : comparatorMap.keySet()) { Attribute a1 = 
r1attr.get(s1); if (a1 == null) { LOG.warn("Record does not contain specified attribute " + s1 + ", record=" + r1); } Attribute a2 = r2attr.get(s1); if (a2 == null) { LOG.warn("Record does not contain specified attribute " + s1 + ", record=" + r2); } if (!attributeIsDeterminative(s1)) { nonDeterministicAttributeExists = true; LOG.debug("Non deterministic match evaluation on attribute " + s1); ExistentialBooleanComparator ebc = comparatorMap.get(s1); if (!ebc.attributesMatch(a1, a2)) { LOG.debug("Attribute a1=" + a1 + " and a2=" + a2 + " do not match, thus records do not match"); return false; } } } LOG.debug(nonDeterministicAttributeExists ? "Records match" : "Records do not match because the deterministic factors match was indeterminate, and there were no non-deterministic attributes"); return nonDeterministicAttributeExists; } return deterministicMatch == MATCH; } private int matchDeterministicAttributes(Record r1, Record r2) { Map<String, Attribute> r1attr = r1.getAttributes(); Map<String, Attribute> r2attr = r2.getAttributes(); boolean r1r2PairsAllNull = true; for (String s1 : comparatorMap.keySet()) { if (attributeIsDeterminative(s1)) { LOG.debug("Evaluating deterministic attribute " + s1); Attribute a1 = r1attr.get(s1); boolean a1AllNull = attributeAllNull(a1); Attribute a2 = r2attr.get(s1); boolean a2AllNull = attributeAllNull(a2); if (!(a1AllNull || a2AllNull)) { if (!identical(a1, a2)) { LOG.debug("Records do not match due to unequal, non-null deterministic attributes"); return NO_MATCH; } r1r2PairsAllNull = false; } } } return (r1r2PairsAllNull) ? 
MATCH_INDETERMINATE : MATCH; } private boolean attributeAllNull(Attribute a) { boolean ret = false; if (a != null) { Iterator<String> values = a.iterator(); boolean allNull = true; while (values.hasNext() && allNull) { if (values.next() != null) { allNull = false; } } ret = allNull; } return ret; } private boolean attributeIsDeterminative(String s1) { for (AttributeParameters ap : attributeParameters) { if (s1.equals(ap.getAttributeName()) && ap.isDeterminative()) { return true; } } return false; } private boolean identical(Attribute p1, Attribute p2) { if (p1 == null || p2 == null) return false; Iterator<String> i1 = p1.iterator(); while (i1.hasNext()) { String s1 = (String) i1.next(); Iterator<String> i2 = p2.iterator(); while (i2.hasNext()) { String s2 = (String) i2.next(); if (!stringsSame(s1, s2)) { return false; } } } return true; } private boolean stringsSame(String s1, String s2) { return s1 != null && s2 != null && s1.equals(s2); } private boolean haveSameAttributes(Map<String, Attribute> r1attr, Map<String, Attribute> r2attr) { return (r1attr == null && r2attr == null) || (r1attr.isEmpty() && r2attr.isEmpty()) || (r1attr.keySet().containsAll(r2attr.keySet()) && r2attr.keySet().containsAll(r1attr.keySet())); } } private static final class RecordComparator implements Comparator<ExternallyIdentifiableRecord> { private List<AttributeParameters> attributeParameters; public RecordComparator(Set<AttributeParameters> attributeParameters) { this.attributeParameters = new ArrayList<AttributeParameters>(); this.attributeParameters.addAll(attributeParameters); Collections.sort(this.attributeParameters, new AttributeParametersComparator()); } @Override public int compare(ExternallyIdentifiableRecord r1, ExternallyIdentifiableRecord r2) { for (AttributeParameters ap : attributeParameters) { SortOrderSpecification sos = ap.getSortOrder(); if (sos != null) { int reverseFactor = sos.getSortOrder().equals(SortOrderSpecification.SORT_ORDER_ASCENDING) ? 
1 : -1; Attribute a1 = r1.getAttribute(ap.getAttributeName()); Attribute a2 = r2.getAttribute(ap.getAttributeName()); if (!((a1 == null && a2 == null) || (a1.equals(a2)))) { return a1.compareTo(a2) * reverseFactor; } } } return 0; } private static final class AttributeParametersComparator implements Comparator<AttributeParameters> { @Override public int compare(AttributeParameters o1, AttributeParameters o2) { int ret = 0; if (o1 != null && o2 != null) { SortOrderSpecification sos1 = o1.getSortOrder(); SortOrderSpecification sos2 = o2.getSortOrder(); if (sos1 != null && sos2 == null) { ret = 1; } else if (sos2 != null && sos1 == null) { ret = -1; } else if (sos1 != null && sos2 != null) { int rank1 = sos1.getSortOrderRank(); int rank2 = sos2.getSortOrderRank(); if (rank1 != rank2) { ret = (rank1 > rank2 ? 1 : -1); } } } return ret; } } } }
apache-2.0
aruanruan/copycat
protocol/src/main/java/net/kuujo/copycat/raft/protocol/Query.java
2956
/* * Copyright 2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.kuujo.copycat.raft.protocol; import net.kuujo.copycat.util.BuilderPool; /** * Raft state queries read system state. * <p> * Queries are submitted by clients to a Raft server to read Raft cluster-wide state. In contrast to * {@link Command commands}, queries allow for more flexible * {@link ConsistencyLevel consistency levels} that trade consistency for performance. * <p> * All queries must specify a {@link #consistency()} with which to execute the query. The provided consistency level * dictates how queries are submitted to the Raft cluster. Higher consistency levels like * {@link ConsistencyLevel#LINEARIZABLE} and {@link ConsistencyLevel#LINEARIZABLE_LEASE} * are forwarded to the cluster leader, while lower levels are allowed to read from followers for higher throughput. * <p> * By default, all queries should use the strongest consistency level, {@link ConsistencyLevel#LINEARIZABLE}. * It is essential that users understand the trade-offs in the various consistency levels before using them. * * @see ConsistencyLevel * * @author <a href="http://github.com/kuujo">Jordan Halterman</a> */ public interface Query<T> extends Operation<T> { /** * Returns the query consistency level. * <p> * The consistency will dictate how the query is executed on the server state. 
Stronger consistency levels can guarantee * linearizability in all or most cases, while weaker consistency levels trade linearizability for more performant * reads from followers. Consult the {@link ConsistencyLevel} documentation for more information * on the different consistency levels. * <p> * By default, this method enforces strong consistency with the {@link ConsistencyLevel#LINEARIZABLE} consistency level. * * @return The query consistency level. */ default ConsistencyLevel consistency() { return ConsistencyLevel.LINEARIZABLE; } /** * Base builder for queries. */ abstract class Builder<T extends Builder<T, U, V>, U extends Query<V>, V> extends Operation.Builder<T, U, V> { protected U query; protected Builder(BuilderPool<T, U> pool) { super(pool); } @Override protected void reset(U query) { super.reset(query); this.query = query; } @Override public U build() { close(); return query; } } }
apache-2.0
bshp/midPoint
gui/admin-gui/src/main/java/com/evolveum/midpoint/gui/api/model/LoadableModel.java
3113
/* * Copyright (c) 2010-2017 Evolveum and contributors * * This work is dual-licensed under the Apache License 2.0 * and European Union Public License. See LICENSE file for details. */ package com.evolveum.midpoint.gui.api.model; import com.evolveum.midpoint.gui.api.util.WebComponentUtil; import com.evolveum.midpoint.prism.PrismContext; import com.evolveum.midpoint.util.Producer; import com.evolveum.midpoint.util.exception.SchemaException; import org.apache.wicket.model.IModel; /** * Lazy loading Wicket model. This is an IModel implementation * that loads the model content only when needed. This is supposed * to be used for objects that are expensive to load. So we can avoid * loading the at all when a component that uses them is not visible. * * @author lazyman */ public abstract class LoadableModel<T> implements IModel<T> { private T object; private boolean loaded = false; private boolean alwaysReload; public LoadableModel() { this(null, true); } public LoadableModel(boolean alwaysReload) { this(null, alwaysReload); } public LoadableModel(T object) { this(object, true); } public LoadableModel(T object, boolean alwaysReload) { this.object = object; this.alwaysReload = alwaysReload; } public static <T> LoadableModel<T> create(Producer<T> producer, boolean alwaysReload) { return new LoadableModel<T>(alwaysReload) { @Override protected T load() { return producer.run(); } }; } public T getObject() { if (!loaded) { setObject(load()); onLoad(); this.loaded = true; } if (object instanceof IModel) { IModel model = (IModel) object; return (T) model.getObject(); } return object; } public void setObject(T object) { if (this.object instanceof IModel) { ((IModel<T>) this.object).setObject(object); } else { this.object = object; } this.loaded = true; } public boolean isLoaded() { return loaded; } public void reset() { loaded = false; } public void detach() { if (loaded && alwaysReload) { this.loaded = false; object = null; onDetach(); } } public IModel getNestedModel() { if 
(object instanceof IModel) { return (IModel) object; } else { return null; } } public String toString() { StringBuilder builder = new StringBuilder(); builder.append(":attached=").append(loaded).append(":object=[").append(this.object).append("]"); return builder.toString(); } protected abstract T load(); protected void onLoad() { } protected void onDetach() { } public void revive(PrismContext prismContext) throws SchemaException { if (isLoaded()) { WebComponentUtil.reviveObject(object, prismContext); } } }
apache-2.0
RNDITS/geonetworking
asn1-uper/src/main/java/net/gcdc/asn1/uper/StringCoder.java
15561
package net.gcdc.asn1.uper; import java.lang.annotation.Annotation; import java.math.BigInteger; import java.nio.ByteBuffer; import java.nio.CharBuffer; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import net.gcdc.asn1.datatypes.Asn1String; import net.gcdc.asn1.datatypes.CharacterRestriction; import net.gcdc.asn1.datatypes.DefaultAlphabet; import net.gcdc.asn1.datatypes.FixedSize; import net.gcdc.asn1.datatypes.RestrictedString; import net.gcdc.asn1.datatypes.SizeRange; import org.slf4j.Logger; import org.slf4j.LoggerFactory; class StringCoder implements Decoder, Encoder { private static final Logger LOGGER = LoggerFactory.getLogger(StringCoder.class); @Override public <T> boolean canEncode(T obj, Annotation[] extraAnnotations) { return obj instanceof String || obj instanceof Asn1String; } @Override public <T> void encode(BitBuffer bitbuffer, T obj, Annotation[] extraAnnotations) throws Asn1EncodingException { Class<?> type = obj.getClass(); AnnotationStore annotations = new AnnotationStore(type.getAnnotations(), extraAnnotations); UperEncoder.logger.debug("STRING {} of type {}", obj, obj.getClass().getName()); String string = (obj instanceof String) ? ((String) obj) : ((Asn1String) obj).value(); RestrictedString restrictionAnnotation = annotations .getAnnotation(RestrictedString.class); if (restrictionAnnotation == null) { throw new UnsupportedOperationException( "Unrestricted character strings are not supported yet. 
All annotations: " + Arrays.asList(type.getAnnotations())); } FixedSize fixedSize = annotations.getAnnotation(FixedSize.class); SizeRange sizeRange = annotations.getAnnotation(SizeRange.class); if (fixedSize != null && fixedSize.value() != string.length()) { throw new IllegalArgumentException( "Bad string length, expected " + fixedSize.value() + ", got " + string.length()); } if (sizeRange != null && !sizeRange.hasExtensionMarker() && (string.length() < sizeRange.minValue() || sizeRange.maxValue() < string .length())) { throw new IllegalArgumentException( "Bad string length, expected " + sizeRange.minValue() + ".." + sizeRange.maxValue() + ", got " + string.length()); } if (restrictionAnnotation.value() == CharacterRestriction.UTF8String) { // UTF8 length BitBuffer stringbuffer = ByteBitBuffer.createInfinite(); for (char c : string.toCharArray()) { encodeChar(stringbuffer, c, restrictionAnnotation); } stringbuffer.flip(); if (stringbuffer.limit() % 8 != 0) { throw new AssertionError( "utf8 encoding resulted not in multiple of 8 bits"); } int numOctets = (stringbuffer.limit() + 7) / 8; // Actually +7 is not needed here, // since we already checked with %8. 
int position1 = bitbuffer.position(); UperEncoder.encodeLengthDeterminant(bitbuffer, numOctets); UperEncoder.logger.debug("UTF8String {}, length {} octets, encoded as {}", string, numOctets, bitbuffer.toBooleanStringFromPosition(position1)); int position2 = bitbuffer.position(); for (int i = 0; i < stringbuffer.limit(); i++) { bitbuffer.put(stringbuffer.get()); } UperEncoder.logger.debug("UTF8String {}, encoded length {} octets, value bits: {}", string, numOctets, bitbuffer.toBooleanStringFromPosition(position2)); return; } else if (fixedSize != null) { if (fixedSize.value() != string.length()) { throw new IllegalArgumentException( "String length does not match constraints"); } int position = bitbuffer.position(); for (int i = 0; i < fixedSize.value(); i++) { encodeChar(bitbuffer, string.charAt(i), restrictionAnnotation); } UperEncoder.logger.debug("string encoded as <{}>", bitbuffer.toBooleanStringFromPosition(position)); return; } else if (sizeRange != null) { UperEncoder.logger.debug("string length"); UperEncoder.encodeConstrainedInt(bitbuffer, string.length(), sizeRange.minValue(), sizeRange.maxValue(), sizeRange.hasExtensionMarker()); UperEncoder.logger.debug("string content"); for (int i = 0; i < string.length(); i++) { encodeChar(bitbuffer, string.charAt(i), restrictionAnnotation); } // logger.debug("string of type {} size {}: {}", obj.getClass().getName(), // binaryStringFromCollection(lengthBits), binaryStringFromCollection(valuebits)); return; } else { int position1 = bitbuffer.position(); UperEncoder.encodeLengthDeterminant(bitbuffer, string.length()); int position2 = bitbuffer.position(); for (int i = 0; i < string.length(); i++) { encodeChar(bitbuffer, string.charAt(i), restrictionAnnotation); } UperEncoder.logger.debug("STRING {} size {}: {}", obj.getClass().getName(), bitbuffer.toBooleanString(position1, position2 - position1), bitbuffer.toBooleanStringFromPosition(position2)); return; } } @Override public <T> boolean canDecode(Class<T> classOfT, 
Annotation[] extraAnnotations) { return String.class.isAssignableFrom(classOfT) || Asn1String.class.isAssignableFrom(classOfT); } @Override public <T> T decode(BitBuffer bitbuffer, Class<T> classOfT, Annotation[] extraAnnotations) { AnnotationStore annotations = new AnnotationStore(classOfT.getAnnotations(), extraAnnotations); UperEncoder.logger.debug("String"); RestrictedString restrictionAnnotation = annotations .getAnnotation(RestrictedString.class); if (restrictionAnnotation == null) { throw new UnsupportedOperationException( "Unrestricted character strings are not supported yet. All annotations: " + Arrays.asList(classOfT.getAnnotations())); } if (restrictionAnnotation.value() == CharacterRestriction.UTF8String) { long numOctets = UperEncoder.decodeLengthDeterminant(bitbuffer); List<Boolean> content = new ArrayList<Boolean>(); for (int i = 0; i < numOctets * 8; i++) { content.add(bitbuffer.get()); } byte[] contentBytes = UperEncoder.bytesFromCollection(content); String resultStr = StandardCharsets.UTF_8.decode(ByteBuffer.wrap(contentBytes)) .toString(); T result = UperEncoder.instantiate(classOfT, resultStr); return result; } else { FixedSize fixedSize = annotations.getAnnotation(FixedSize.class); SizeRange sizeRange = annotations.getAnnotation(SizeRange.class); long numChars = (fixedSize != null) ? fixedSize.value() : (sizeRange != null) ? 
UperEncoder.decodeConstrainedInt(bitbuffer, UperEncoder.intRangeFromSizeRange(sizeRange)) : UperEncoder.decodeLengthDeterminant(bitbuffer); UperEncoder.logger.debug("known-multiplier string, numchars: {}", numChars); StringBuilder stringBuilder = new StringBuilder((int) numChars); for (int c = 0; c < numChars; c++) { stringBuilder.append(decodeRestrictedChar(bitbuffer, restrictionAnnotation)); } String resultStr = stringBuilder.toString(); UperEncoder.logger.debug("Decoded as {}", resultStr); T result = UperEncoder.instantiate(classOfT, resultStr); return result; } } private static void encodeChar(BitBuffer bitbuffer, char c, RestrictedString restriction) throws Asn1EncodingException { UperEncoder.logger.debug("char {}", c); switch (restriction.value()) { case IA5String: if (restriction.alphabet() != DefaultAlphabet.class) { throw new UnsupportedOperationException( "alphabet for IA5String is not supported yet."); } UperEncoder.encodeConstrainedInt( bitbuffer, StandardCharsets.US_ASCII.encode(CharBuffer.wrap(new char[] { c })).get() & 0xff, 0, 127); return; case UTF8String: if (restriction.alphabet() != DefaultAlphabet.class) { throw new UnsupportedOperationException( "alphabet for UTF8 is not supported yet."); } ByteBuffer buffer = StandardCharsets.UTF_8 .encode(CharBuffer.wrap(new char[] { c })); for (int i = 0; i < buffer.limit(); i++) { UperEncoder.encodeConstrainedInt(bitbuffer, buffer.get() & 0xff, 0, 255); } return; case VisibleString: case ISO646String: if (restriction.alphabet() != DefaultAlphabet.class) { char[] chars; try { chars = UperEncoder.instantiate(restriction.alphabet()).chars().toCharArray(); } catch (IllegalArgumentException e) { LOGGER.info("Uninstantinatable alphabet ", e); throw new IllegalArgumentException("Uninstantinatable alphabet" + restriction.alphabet().getName()); } if (BigInteger.valueOf(chars.length - 1).bitLength() < BigInteger.valueOf(126) .bitLength()) { Arrays.sort(chars); String strAlphabet = new String(chars); int index = 
strAlphabet.indexOf(c); if (index < 0) { throw new IllegalArgumentException("can't find character " + c + " in alphabet " + strAlphabet); } UperEncoder.encodeConstrainedInt( bitbuffer, index, 0, chars.length - 1); return; } else { UperEncoder.encodeConstrainedInt( bitbuffer, StandardCharsets.US_ASCII.encode(CharBuffer.wrap(new char[] { c })) .get() & 0xff, 0, 126); return; } } else { UperEncoder.encodeConstrainedInt( bitbuffer, StandardCharsets.US_ASCII.encode(CharBuffer.wrap(new char[] { c })) .get() & 0xff, 0, 126); return; } default: throw new UnsupportedOperationException("String type " + restriction + " is not supported yet"); } } private static String decodeRestrictedChar(BitBuffer bitqueue, RestrictedString restrictionAnnotation) { switch (restrictionAnnotation.value()) { case IA5String: { if (restrictionAnnotation.alphabet() != DefaultAlphabet.class) { throw new UnsupportedOperationException( "alphabet for IA5String is not supported yet."); } byte charByte = (byte) UperEncoder.decodeConstrainedInt(bitqueue, UperEncoder.newRange(0, 127, false)); byte[] bytes = new byte[] { charByte }; String result = StandardCharsets.US_ASCII.decode(ByteBuffer.wrap(bytes)).toString(); if (result.length() != 1) { throw new AssertionError("decoded more than one char (" + result + ")"); } return result; } case VisibleString: case ISO646String: { if (restrictionAnnotation.alphabet() != DefaultAlphabet.class) { char[] chars; try { chars = UperEncoder.instantiate(restrictionAnnotation.alphabet()).chars().toCharArray(); } catch (IllegalArgumentException e) { LOGGER.info("Uninstantinatable alphabet ", e); throw new IllegalArgumentException("Uninstantinatable alphabet" + restrictionAnnotation.alphabet().getName()); } if (BigInteger.valueOf(chars.length - 1).bitLength() < BigInteger.valueOf(126) .bitLength()) { Arrays.sort(chars); int index = (byte) UperEncoder.decodeConstrainedInt(bitqueue, UperEncoder.newRange(0, chars.length - 1, false)); String strAlphabet = new String(chars); 
char c = strAlphabet.charAt(index); String result = new String("" + c); return result; } else { // Encode normally byte charByte = (byte) UperEncoder.decodeConstrainedInt(bitqueue, UperEncoder.newRange(0, 126, false)); byte[] bytes = new byte[] { charByte }; String result = StandardCharsets.US_ASCII.decode(ByteBuffer.wrap(bytes)) .toString(); if (result.length() != 1) { throw new AssertionError( "decoded more than one char (" + result + ")"); } return result; } } else { // Encode normally byte charByte = (byte) UperEncoder.decodeConstrainedInt(bitqueue, UperEncoder.newRange(0, 126, false)); byte[] bytes = new byte[] { charByte }; String result = StandardCharsets.US_ASCII.decode(ByteBuffer.wrap(bytes)) .toString(); if (result.length() != 1) { throw new AssertionError( "decoded more than one char (" + result + ")"); } return result; } } default: throw new UnsupportedOperationException("String type " + restrictionAnnotation + " is not supported yet"); } } }
apache-2.0
bbossgroups/bbossgroups-3.5
bboss-websocket/src/org/frameworkset/web/socket/handler/WebSocketHttpRequestHandler.java
4623
package org.frameworkset.web.socket.handler; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.servlet.ServletContext; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.jsp.PageContext; import org.frameworkset.http.ServerHttpRequest; import org.frameworkset.http.ServerHttpResponse; import org.frameworkset.http.ServletServerHttpRequest; import org.frameworkset.http.ServletServerHttpResponse; import org.frameworkset.spi.Lifecycle; import org.frameworkset.util.Assert; import org.frameworkset.web.HttpRequestHandler; import org.frameworkset.web.servlet.context.ServletContextAware; import org.frameworkset.web.socket.inf.WebSocketHandler; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class WebSocketHttpRequestHandler implements HttpRequestHandler, Lifecycle, ServletContextAware { private final Logger logger = LoggerFactory.getLogger(WebSocketHttpRequestHandler.class); private final WebSocketHandler wsHandler; private final HandshakeHandler handshakeHandler; private final List<HandshakeInterceptor> interceptors = new ArrayList<HandshakeInterceptor>(); private volatile boolean running = false; public WebSocketHttpRequestHandler(WebSocketHandler wsHandler) { this(wsHandler, new DefaultHandshakeHandler()); } public WebSocketHttpRequestHandler(WebSocketHandler wsHandler, HandshakeHandler handshakeHandler) { Assert.notNull(wsHandler, "wsHandler must not be null"); Assert.notNull(handshakeHandler, "handshakeHandler must not be null"); this.wsHandler = new ExceptionWebSocketHandlerDecorator(new LoggingWebSocketHandlerDecorator(wsHandler)); this.handshakeHandler = handshakeHandler; } /** * Return the WebSocketHandler. */ public WebSocketHandler getWebSocketHandler() { return this.wsHandler; } /** * Return the HandshakeHandler. 
*/ public HandshakeHandler getHandshakeHandler() { return this.handshakeHandler; } /** * Configure one or more WebSocket handshake request interceptors. */ public void setHandshakeInterceptors(List<HandshakeInterceptor> interceptors) { this.interceptors.clear(); if (interceptors != null) { this.interceptors.addAll(interceptors); } } /** * Return the configured WebSocket handshake request interceptors. */ public List<HandshakeInterceptor> getHandshakeInterceptors() { return this.interceptors; } @Override public void setServletContext(ServletContext servletContext) { if (this.handshakeHandler instanceof ServletContextAware) { ((ServletContextAware) this.handshakeHandler).setServletContext(servletContext); } } @Override public boolean isRunning() { return this.running; } @Override public void start() { if (!isRunning()) { this.running = true; if (this.handshakeHandler instanceof Lifecycle) { ((Lifecycle) this.handshakeHandler).start(); } } } @Override public void stop() { if (isRunning()) { this.running = false; if (this.handshakeHandler instanceof Lifecycle) { ((Lifecycle) this.handshakeHandler).stop(); } } } @Override public void handleRequest(HttpServletRequest servletRequest, HttpServletResponse servletResponse,PageContext pageContext) throws ServletException, IOException { ServerHttpRequest request = new ServletServerHttpRequest(servletRequest); ServerHttpResponse response = new ServletServerHttpResponse(servletResponse); HandshakeInterceptorChain chain = new HandshakeInterceptorChain(this.interceptors, this.wsHandler); HandshakeFailureException failure = null; try { if (logger.isDebugEnabled()) { logger.debug(servletRequest.getMethod() + " " + servletRequest.getRequestURI()); } Map<String, Object> attributes = new HashMap<String, Object>(); if (!chain.applyBeforeHandshake(request, response, attributes)) { return; } this.handshakeHandler.doHandshake(request, response, this.wsHandler, attributes); chain.applyAfterHandshake(request, response, null); 
response.close(); } catch (HandshakeFailureException ex) { failure = ex; } catch (Throwable ex) { failure = new HandshakeFailureException("Uncaught failure for request " + request.getURI(), ex); } finally { if (failure != null) { chain.applyAfterHandshake(request, response, failure); throw failure; } } } }
apache-2.0
finiteloopme/rhiot
datastream/components/camel-device-io/src/test/java/io/rhiot/component/deviceio/URIRegexTest.java
2823
/**
 * Licensed to the Rhiot under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.rhiot.component.deviceio;

import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.junit.Assert;
import org.junit.Test;

/**
 * Exercises the regular expressions published by {@code DeviceIOConstants}: the GPIO endpoint URI
 * pattern (with named groups {@code scheme} and {@code gpioId}) and the pipe-separated option
 * list pattern / split token.
 */
public class URIRegexTest {

    @Test
    public void schemeGPIOTest() {
        // A full endpoint URI: both the scheme and the GPIO id must be captured.
        Matcher matcher = Pattern.compile(DeviceIOConstants.CAMEL_GPIO_URL_PATTERN)
                .matcher("deviceio-gpio://1");

        Assert.assertTrue(matcher.matches());
        Assert.assertEquals("1", matcher.group("gpioId"));
        Assert.assertEquals("deviceio-gpio", matcher.group("scheme"));
    }

    @Test
    public void schemeShortGPIOTest() {
        // A bare pin number is accepted as shorthand; only the id group is checked.
        Matcher matcher = Pattern.compile(DeviceIOConstants.CAMEL_GPIO_URL_PATTERN).matcher("1");

        Assert.assertTrue(matcher.matches());
        Assert.assertEquals("1", matcher.group("gpioId"));
    }

    @Test
    public void optionSplitTest() {
        // A single token matches the option pattern and survives splitting intact.
        Matcher matcher = Pattern.compile(DeviceIOConstants.CAMEL_SPLIT_REGEX).matcher("ABC_DE");

        Assert.assertTrue(matcher.matches());
        Assert.assertArrayEquals(new String[] { "ABC_DE" },
                "ABC_DE".split(DeviceIOConstants.CAMEL_SPLIT));
    }

    @Test
    public void optionSplitTest2() {
        // Two pipe-separated tokens match and split into their two parts.
        Matcher matcher = Pattern.compile(DeviceIOConstants.CAMEL_SPLIT_REGEX)
                .matcher("ABC_DE|EF_GHI");

        Assert.assertTrue(matcher.matches());
        Assert.assertArrayEquals(new String[] { "ABC_DE", "EF_GHI" },
                "ABC_DE|EF_GHI".split(DeviceIOConstants.CAMEL_SPLIT));
    }

    @Test
    public void optionSplitTest3() {
        // A digit inside a token makes the whole option list invalid.
        Matcher matcher = Pattern.compile(DeviceIOConstants.CAMEL_SPLIT_REGEX)
                .matcher("ABC_DE|E3F_GHI");

        Assert.assertFalse(matcher.matches());
    }

    @Test
    public void optionSplitTest4() {
        // A lower-case character inside a token is rejected as well.
        Matcher matcher = Pattern.compile(DeviceIOConstants.CAMEL_SPLIT_REGEX)
                .matcher("ABC_DE|EaF_GHI");

        Assert.assertFalse(matcher.matches());
    }
}
apache-2.0
jdeppe-pivotal/geode
geode-gfsh/src/main/java/org/apache/geode/management/internal/cli/shell/jline/ANSIBuffer.java
12130
/*
 * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved.
 *
 * This software is distributable under the BSD license. See the terms of the BSD license in the
 * documentation provided with this software.
 */
package org.apache.geode.management.internal.cli.shell.jline;

import java.io.BufferedReader;
import java.io.InputStreamReader;

import org.springframework.shell.support.util.OsUtils;

/**
 * A buffer that can contain ANSI text.
 *
 * <p>Two parallel buffers are kept in sync: {@code ansiBuffer} holds the text with ANSI escape
 * sequences interleaved, {@code plainBuffer} holds the bare text. {@link #toString()} renders one
 * or the other depending on the {@code ansiEnabled} flag.
 *
 * @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
 */
public class ANSIBuffer {
  // Selects which of the two buffers toString() renders.
  private boolean ansiEnabled = true;
  // Text with ANSI escape sequences included.
  private final StringBuilder ansiBuffer = new StringBuilder();
  // The same text without any escape sequences.
  private final StringBuilder plainBuffer = new StringBuilder();

  public ANSIBuffer() {}

  public ANSIBuffer(final String str) {
    append(str);
  }

  public void setAnsiEnabled(final boolean ansi) {
    ansiEnabled = ansi;
  }

  public boolean getAnsiEnabled() {
    return ansiEnabled;
  }

  public String getAnsiBuffer() {
    return ansiBuffer.toString();
  }

  public String getPlainBuffer() {
    return plainBuffer.toString();
  }

  // Renders the requested variant, ignoring the ansiEnabled field.
  public String toString(final boolean ansi) {
    return ansi ? getAnsiBuffer() : getPlainBuffer();
  }

  public String toString() {
    return toString(ansiEnabled);
  }

  // Appends plain text to both buffers.
  public ANSIBuffer append(final String str) {
    ansiBuffer.append(str);
    plainBuffer.append(str);
    return this;
  }

  // Appends str wrapped in the given SGR attribute code; the escape sequences go only into the
  // ANSI buffer, the plain buffer receives the bare text.
  public ANSIBuffer attrib(final String str, final int code) {
    ansiBuffer.append(ANSICodes.attrib(code)).append(str).append(ANSICodes.attrib(ANSICodes.OFF));
    plainBuffer.append(str);
    return this;
  }

  public ANSIBuffer red(final String str) {
    return attrib(str, ANSICodes.FG_RED);
  }

  public ANSIBuffer blue(final String str) {
    return attrib(str, ANSICodes.FG_BLUE);
  }

  public ANSIBuffer green(final String str) {
    return attrib(str, ANSICodes.FG_GREEN);
  }

  public ANSIBuffer black(final String str) {
    return attrib(str, ANSICodes.FG_BLACK);
  }

  public ANSIBuffer yellow(final String str) {
    return attrib(str, ANSICodes.FG_YELLOW);
  }

  public ANSIBuffer magenta(final String str) {
    return attrib(str, ANSICodes.FG_MAGENTA);
  }

  public ANSIBuffer cyan(final String str) {
    return attrib(str, ANSICodes.FG_CYAN);
  }

  public ANSIBuffer bold(final String str) {
    return attrib(str, ANSICodes.BOLD);
  }

  public ANSIBuffer underscore(final String str) {
    return attrib(str, ANSICodes.UNDERSCORE);
  }

  public ANSIBuffer blink(final String str) {
    return attrib(str, ANSICodes.BLINK);
  }

  public ANSIBuffer reverse(final String str) {
    return attrib(str, ANSICodes.REVERSE);
  }

  /** Builders for raw ANSI (ECMA-48 / ANSI.SYS style) escape sequences. */
  public static class ANSICodes {
    // SGR (Select Graphic Rendition) attribute codes.
    static final int OFF = 0;
    static final int BOLD = 1;
    static final int UNDERSCORE = 4;
    static final int BLINK = 5;
    static final int REVERSE = 7;
    static final int CONCEALED = 8;

    // Foreground color codes (30-37).
    static final int FG_BLACK = 30;
    static final int FG_RED = 31;
    static final int FG_GREEN = 32;
    static final int FG_YELLOW = 33;
    static final int FG_BLUE = 34;
    static final int FG_MAGENTA = 35;
    static final int FG_CYAN = 36;
    static final int FG_WHITE = 37;

    // The escape character that introduces every sequence.
    static final char ESC = 27;

    /**
     * Constructor is private since this is a utility class.
     */
    private ANSICodes() {}

    /**
     * Sets the screen mode: emits {@code ESC[=<mode>h}. Mode 7 enables line wrapping; the
     * remaining mode numbers select legacy DOS text/graphics resolutions (the original table in
     * this javadoc was mangled during formatting — consult an ANSI.SYS reference for exact
     * values).
     */
    public static String setmode(final int mode) {
      return ESC + "[=" + mode + "h";
    }

    /**
     * Same as setmode() except for mode = 7, which disables line wrapping (useful for writing the
     * right-most column without scrolling to the next line). Emits {@code ESC[=<mode>l}.
     */
    public static String resetmode(final int mode) {
      return ESC + "[=" + mode + "l";
    }

    /**
     * Clears the screen and moves the cursor to the home position.
     */
    public static String clrscr() {
      return ESC + "[2J";
    }

    /**
     * Removes all characters from the current cursor position until the end of the line.
     */
    public static String clreol() {
      return ESC + "[K";
    }

    /**
     * Moves the cursor n positions to the left. If n is greater or equal to the current cursor
     * column, the cursor is moved to the first column.
     */
    public static String left(final int n) {
      return ESC + "[" + n + "D";
    }

    /**
     * Moves the cursor n positions to the right. If n plus the current cursor column is greater
     * than the rightmost column, the cursor is moved to the rightmost column.
     */
    public static String right(final int n) {
      return ESC + "[" + n + "C";
    }

    /**
     * Moves the cursor n rows up without changing the current column. If n is greater than or
     * equal to the current row, the cursor is placed in the first row.
     */
    public static String up(final int n) {
      return ESC + "[" + n + "A";
    }

    /**
     * Moves the cursor n rows down. If n plus the current row is greater than the bottom row, the
     * cursor is moved to the bottom row.
     */
    public static String down(final int n) {
      return ESC + "[" + n + "B";
    }

    /*
     * Moves the cursor to the given row and column. (1,1) represents the upper left corner. The
     * lower right corner of a usual DOS screen is (25, 80).
     */
    public static String gotoxy(final int row, final int column) {
      return ESC + "[" + row + ";" + column + "H";
    }

    /**
     * Saves the current cursor position.
     */
    public static String save() {
      return ESC + "[s";
    }

    /**
     * Restores the saved cursor position.
     */
    public static String restore() {
      return ESC + "[u";
    }

    /**
     * Sets a character attribute (SGR): emits {@code ESC[<attr>m}. Text attributes: 0 all off,
     * 1 bold, 4 underscore, 5 blink, 7 reverse, 8 concealed. Foreground colors 30-37 and
     * background colors 40-47 follow the black/red/green/yellow/blue/magenta/cyan/white order
     * used by the FG_* constants above. Attributes remain in effect until the next attribute
     * command is sent.
     */
    public static String attrib(final int attr) {
      return ESC + "[" + attr + "m";
    }

    /**
     * Sets the key with the given code to the given value: emits {@code ESC[<code>;<value>p}.
     * {@code code} is an ANSI.SYS key code and {@code value} any semicolon-separated combination
     * of quoted strings and numeric character codes. Example: to set F1 (code "0;59") to the
     * string "Hello F1" followed by CRLF: {@code ANSI.setkey("0;59", "\"Hello F1\";13;10")}.
     * (The exhaustive key-code table that used to live in this javadoc was garbled during
     * formatting — see an ANSI.SYS key-code reference.)
     */
    public static String setkey(final String code, final String value) {
      return ESC + "[" + code + ";" + value + "p";
    }
  }

  // Manual smoke test, not product behavior: emits a setkey sequence for key code 97 ('a') with
  // the value 97;98;99;13 — on terminals honoring ANSI.SYS key reassignment this appears to remap
  // 'a' to "abc" + CR (TODO confirm on a real terminal) — then echoes stdin lines back.
  public static void main(final String[] args) throws Exception {
    BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
    System.out.print(ANSICodes.setkey("97", "97;98;99;13") + ANSICodes.attrib(ANSICodes.OFF));
    System.out.flush();

    String line;
    while ((line = reader.readLine()) != null) {
      System.out.println("GOT: " + line);
    }
  }

  // Bright-color opt-in flags read from the Roo / Spring Shell system properties.
  private static final boolean ROO_BRIGHT_COLORS = Boolean.getBoolean("roo.bright");
  private static final boolean SHELL_BRIGHT_COLORS = Boolean.getBoolean("spring.shell.bright");
  private static final boolean BRIGHT_COLORS = ROO_BRIGHT_COLORS || SHELL_BRIGHT_COLORS;

  /**
   * Factory for a buffer with platform/property tweaks: on Windows, reverse() additionally emits
   * attrib(27) — the char constant {@code esc} widens to the int 27, which in SGR terms appears
   * intended as "reverse video off" (TODO confirm); when bright colors are enabled, foreground
   * color codes 30..37 are emitted with the ";1" (bold/bright) modifier.
   */
  public static ANSIBuffer getANSIBuffer() {
    final char esc = (char) 27;
    return new ANSIBuffer() {
      @Override
      public ANSIBuffer reverse(final String str) {
        if (OsUtils.isWindows()) {
          return super.reverse(str).append(ANSICodes.attrib(esc));
        }
        return super.reverse(str);
      }

      @Override
      public ANSIBuffer attrib(final String str, final int code) {
        if (BRIGHT_COLORS && 30 <= code && code <= 37) {
          // This is a color code: add a 'bright' code
          return append(esc + "[" + code + ";1m").append(str).append(ANSICodes.attrib(0));
        }
        return super.attrib(str, code);
      }
    };
  }
}
apache-2.0
ButterflyNetwork/bazel
src/main/java/com/google/devtools/build/lib/packages/WorkspaceFactoryHelper.java
4246
// Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.packages;

import com.google.common.base.Preconditions;
import com.google.common.base.Verify;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.cmdline.LabelSyntaxException;
import com.google.devtools.build.lib.events.Location;
import com.google.devtools.build.lib.events.StoredEventHandler;
import com.google.devtools.build.lib.packages.RuleFactory.BuildLangTypedAttributeValuesMap;
import com.google.devtools.build.lib.syntax.FuncallExpression;
import java.util.Map;

/** A helper for the {@link WorkspaceFactory} to create repository rules */
public class WorkspaceFactoryHelper {

  /**
   * Creates a repository rule from WORKSPACE keyword arguments, registers it in the package
   * builder (replacing any previously registered rule of the same name), and materializes one
   * bind() rule under {@code //external:} for every external binding the rule class declares.
   *
   * @param pkg the (external) package builder being populated
   * @param ruleClass the repository rule class being instantiated
   * @param bindRuleClass the rule class used for the generated bind rules
   * @param kwargs the build-language attribute values supplied in the WORKSPACE call
   * @param ast the function call expression; also supplies the rule's location
   * @return the newly created repository rule
   */
  public static Rule createAndAddRepositoryRule(
      Package.Builder pkg,
      RuleClass ruleClass,
      RuleClass bindRuleClass,
      Map<String, Object> kwargs,
      FuncallExpression ast)
      throws RuleFactory.InvalidRuleException, Package.NameConflictException, LabelSyntaxException,
          InterruptedException {
    StoredEventHandler eventHandler = new StoredEventHandler();
    BuildLangTypedAttributeValuesMap attributeValues = new BuildLangTypedAttributeValuesMap(kwargs);
    Rule rule =
        RuleFactory.createRule(
            pkg,
            ruleClass,
            attributeValues,
            eventHandler,
            ast,
            ast.getLocation(),
            /*env=*/ null,
            new AttributeContainer(ruleClass));
    // Forward any events/posts captured during rule creation to the package builder.
    pkg.addEvents(eventHandler.getEvents());
    pkg.addPosts(eventHandler.getPosts());
    overwriteRule(pkg, rule);
    // Each external binding <name> -> <actual label> becomes a bind rule //external:<name>.
    for (Map.Entry<String, Label> entry :
        ruleClass.getExternalBindingsFunction().apply(rule).entrySet()) {
      Label nameLabel = Label.parseAbsolute("//external:" + entry.getKey(), ImmutableMap.of());
      addBindRule(
          pkg,
          bindRuleClass,
          nameLabel,
          entry.getValue(),
          rule.getLocation(),
          new AttributeContainer(bindRuleClass));
    }
    return rule;
  }

  /**
   * Creates a bind rule mapping {@code virtual} to {@code actual} and registers it in the
   * package builder, replacing any same-named rule.
   *
   * @param virtual the //external:<name> label the bind rule is named after
   * @param actual the target being bound; may be null, in which case no "actual" attribute is set
   */
  static void addBindRule(
      Package.Builder pkg,
      RuleClass bindRuleClass,
      Label virtual,
      Label actual,
      Location location,
      AttributeContainer attributeContainer)
      throws RuleFactory.InvalidRuleException, Package.NameConflictException, InterruptedException {
    Map<String, Object> attributes = Maps.newHashMap();
    // Bound rules don't have a name field, but this works because we don't want more than one
    // with the same virtual name.
    attributes.put("name", virtual.getName());
    if (actual != null) {
      attributes.put("actual", actual);
    }
    StoredEventHandler handler = new StoredEventHandler();
    BuildLangTypedAttributeValuesMap attributeValues =
        new BuildLangTypedAttributeValuesMap(attributes);
    Rule rule =
        RuleFactory.createRule(
            pkg,
            bindRuleClass,
            attributeValues,
            handler,
            /*ast=*/ null,
            location,
            /*env=*/ null,
            attributeContainer);
    overwriteRule(pkg, rule);
    rule.setVisibility(ConstantRuleVisibility.PUBLIC);
  }

  /**
   * Registers {@code rule} in the package builder, removing any existing same-named target first.
   * Only rules without output files may be overwritten (both the new rule and, when replacing,
   * the old one are checked for having no outputs).
   */
  private static void overwriteRule(Package.Builder pkg, Rule rule)
      throws Package.NameConflictException, InterruptedException {
    Preconditions.checkArgument(rule.getOutputFiles().isEmpty());
    Target old = pkg.targets.get(rule.getName());
    if (old != null) {
      if (old instanceof Rule) {
        Verify.verify(((Rule) old).getOutputFiles().isEmpty());
      }
      pkg.targets.remove(rule.getName());
    }
    pkg.addRule(rule);
  }
}
apache-2.0
danielnorberg/auto-matter
processor/src/test/resources/good/NullableFields.java
367
package foo; import com.google.common.base.Optional; import io.norberg.automatter.AutoMatter; @AutoMatter public interface NullableFields { @javax.annotation.Nullable String nullableFoo(); @Nullable(simple = "foo", complex = java.util.Date.class) String customNullableBar(); String nonNullQuux(); @Nullable(complex = String.class) int nonNullPrimitive(); }
apache-2.0
magnet/bnd
biz.aQute.repository/src/aQute/bnd/repository/maven/provider/Configuration.java
664
package aQute.bnd.repository.maven.provider; import org.osgi.annotation.versioning.ProviderType; @ProviderType public interface Configuration { /** * The url to the remote release repository. */ String releaseUrl(); /** * The url to the remote snapshot repository. If this is not specified, * it falls back to the release repository or just local if this is also * not specified. */ String snapshotUrl(); /** * The path to the local repository */ // default "~/.m2/repository" String local(String deflt); // default false boolean readOnly(); String name(String deflt); String index(String deflt); boolean noupdateOnRelease(); }
apache-2.0
diego10j/produquimic
produquimic/produquimic-war/src/java/paq_general/pre_sucursal_area_departamento.java
7677
/*
 * Screen (Pantalla) that links branch offices (sucursales) to area/department
 * ("PROCESO" / "SUB - PROCESO") combinations, with a two-step report wizard.
 */
package paq_general;

import framework.aplicacion.Fila;
import framework.componentes.Combo;
import framework.componentes.Division;
import framework.componentes.Etiqueta;
import framework.componentes.PanelTabla;
import framework.componentes.Reporte;
import framework.componentes.SeleccionFormatoReporte;
import framework.componentes.SeleccionTabla;
import framework.componentes.Tabla;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.faces.event.AjaxBehaviorEvent;
import org.apache.poi.hssf.record.formula.Ptg;
import org.primefaces.component.tabview.Tab;
import org.primefaces.event.SelectEvent;
import sistema.aplicacion.Pantalla;

// NOTE(review): several imports above (Fila, Combo, Etiqueta, Ptg, Tab, AjaxBehaviorEvent,
// ArrayList, List) appear unused; kept untouched to avoid any risk outside this class.
public class pre_sucursal_area_departamento extends Pantalla {

    // Detail table: GEN_DEPARTAMENTO_SUCURSAL rows for the selected department.
    private Tabla tab_tabla1 = new Tabla();
    // Master table: read-only list of area/department pairs.
    private Tabla tab_area_dep = new Tabla();
    private Reporte rep_reporte = new Reporte();
    private SeleccionFormatoReporte sef_reporte = new SeleccionFormatoReporte();
    private SeleccionTabla set_sucursal = new SeleccionTabla();

    // Parameters accumulated across the two steps of the report wizard.
    Map<String, Object> p_parametros = new HashMap<String, Object>();

    public pre_sucursal_area_departamento() {
        rep_reporte.setId("rep_reporte");
        rep_reporte.getBot_aceptar().setMetodo("aceptarReporte");
        bar_botones.agregarReporte();

        // Master table: areas joined with their departments, read-only with filters.
        tab_area_dep.setId("tab_area_dep");
        tab_area_dep.setSql("select DEP.IDE_GEDEP,ARE.IDE_GEARE,DETALLE_GEARE,DEP.DETALLE_GEDEP "
                + "from GEN_AREA ARE "
                + "INNER JOIN GEN_DEPARTAMENTO DEP ON ARE.IDE_GEARE=DEP.IDE_GEARE "
                + "ORDER BY DETALLE_GEARE ASC, DETALLE_GEDEP ASC");
        tab_area_dep.setCampoPrimaria("IDE_GEDEP");
        tab_area_dep.setNumeroTabla(1);
        tab_area_dep.getColumna("DETALLE_GEARE").setNombreVisual("PROCESO");
        tab_area_dep.getColumna("DETALLE_GEDEP").setNombreVisual("SUB - PROCESO");
        tab_area_dep.getColumna("DETALLE_GEARE").setFiltro(true);
        tab_area_dep.getColumna("DETALLE_GEDEP").setFiltro(true);
        tab_area_dep.getColumna("IDE_GEARE").setVisible(false);
        tab_area_dep.getColumna("IDE_GEDEP").setVisible(false);
        tab_area_dep.setLectura(true);
        tab_area_dep.setRows(10);
        tab_area_dep.onSelect("filtrarDepartamentoSucursal");
        tab_area_dep.dibujar();

        PanelTabla pat_panel2 = new PanelTabla();
        pat_panel2.getMenuTabla().getItem_formato().setDisabled(true);
        pat_panel2.setPanelTabla(tab_area_dep);

        // Detail table: editable department/sucursal assignments.
        tab_tabla1.setId("tab_tabla1");
        tab_tabla1.setGenerarPrimaria(false);
        tab_tabla1.setTabla("GEN_DEPARTAMENTO_SUCURSAL", "IDE_SUCU", 2);
        tab_tabla1.getColumna("IDE_GEDEP").setVisible(false);
        tab_tabla1.getColumna("IDE_GEARE").setVisible(false);
        tab_tabla1.getColumna("IDE_GEDEP").setUnico(true);
        tab_tabla1.getColumna("IDE_GEARE").setUnico(true);
        tab_tabla1.getColumna("IDE_SUCU").setUnico(true);
        tab_tabla1.getColumna("IDE_SUCU").setCombo("SIS_SUCURSAL", "IDE_SUCU", "NOM_SUCU", "");
        tab_tabla1.getColumna("ACTIVO_GEDES").setCheck();
        tab_tabla1.getColumna("ACTIVO_GEDES").setValorDefecto("true");
        tab_tabla1.setMostrarcampoSucursal(true);
        tab_tabla1.setRows(10);
        tab_tabla1.setCondicion(condicionSubProceso());
        tab_tabla1.dibujar();
        tab_tabla1.getColumna("IDE_SUCU").setExterna(false);

        PanelTabla pat_panel1 = new PanelTabla();
        pat_panel1.setMensajeWarn("SUB - PROCESO SUCURSAL");
        pat_panel1.setPanelTabla(tab_tabla1);

        Division div_division = new Division();
        div_division.setId("div_division");
        div_division.dividir2(pat_panel2, pat_panel1, "60%", "H");
        agregarComponente(div_division);

        sef_reporte.setId("sef_reporte");
        agregarComponente(rep_reporte);
        agregarComponente(sef_reporte);

        set_sucursal.setId("set_sucursal");
        set_sucursal.setSeleccionTabla("SIS_SUCURSAL", "IDE_SUCU", "NOM_SUCU");
        set_sucursal.getBot_aceptar().setMetodo("aceptarReporte");
        agregarComponente(set_sucursal);
    }

    /**
     * Builds the detail-table filter for the currently selected area/department.
     * Extracted: this expression used to be duplicated in six places.
     */
    private String condicionSubProceso() {
        return "IDE_GEARE=" + tab_area_dep.getValor("IDE_GEARE")
                + " and IDE_GEDEP=" + tab_area_dep.getValor("IDE_GEDEP");
    }

    /** Re-applies the master filter to the detail table and re-runs its query. */
    private void refrescarSucursales() {
        tab_tabla1.setCondicion(condicionSubProceso());
        tab_tabla1.ejecutarSql();
    }

    /** Row-select listener on the master table: refreshes the detail table. */
    public void filtrarDepartamentoSucursal(SelectEvent evt) {
        tab_area_dep.seleccionarFila(evt);
        refrescarSucursales();
    }

    @Override
    public void abrirListaReportes() {
        rep_reporte.dibujar();
    }

    /**
     * Two-step report flow for "Detalle Sucursal Area Departamento": first the report dialog is
     * closed and the sucursal picker opened; on the second invocation the selected sucursales
     * are added to the parameters and the format-selection dialog is shown.
     */
    public void aceptarReporte() {
        if (rep_reporte.getReporteSelecionado().equals("Detalle Sucursal Area Departamento")) {
            if (rep_reporte.isVisible()) {
                p_parametros = new HashMap<String, Object>();
                rep_reporte.cerrar();
                p_parametros.put("titulo", "Detalle Sucursal Area Departamento");
                set_sucursal.setTitle("SELECCIONE SUCURSAL");
                set_sucursal.dibujar();
                utilitario.addUpdate("rep_reporte,set_sucursal");
            } else if (set_sucursal.isVisible()) {
                p_parametros.put("SUCURSAL", set_sucursal.getSeleccionados());
                set_sucursal.cerrar();
                sef_reporte.setSeleccionFormatoReporte(p_parametros, rep_reporte.getPath());
                sef_reporte.dibujar();
                utilitario.addUpdate("sef_reporte,set_sucursal");
            }
        }
    }

    public Reporte getRep_reporte() {
        return rep_reporte;
    }

    public void setRep_reporte(Reporte rep_reporte) {
        this.rep_reporte = rep_reporte;
    }

    public SeleccionFormatoReporte getSef_reporte() {
        return sef_reporte;
    }

    public void setSef_reporte(SeleccionFormatoReporte sef_reporte) {
        this.sef_reporte = sef_reporte;
    }

    @Override
    public void insertar() {
        if (tab_tabla1.isFocus()) {
            tab_tabla1.insertar();
            // New rows are pre-linked to the currently selected area/department.
            tab_tabla1.setValor("IDE_GEARE", tab_area_dep.getValor("IDE_GEARE"));
            tab_tabla1.setValor("IDE_GEDEP", tab_area_dep.getValor("IDE_GEDEP"));
        }
    }

    @Override
    public void guardar() {
        if (tab_tabla1.guardar()) {
            guardarPantalla();
        }
    }

    @Override
    public void eliminar() {
        if (tab_tabla1.isFocus()) {
            tab_tabla1.eliminar();
        }
    }

    @Override
    public void inicio() {
        super.inicio();
        refrescarSucursales();
    }

    @Override
    public void siguiente() {
        super.siguiente();
        refrescarSucursales();
    }

    @Override
    public void atras() {
        super.atras();
        refrescarSucursales();
    }

    @Override
    public void fin() {
        super.fin();
        refrescarSucursales();
    }

    @Override
    public void actualizar() {
        super.actualizar();
    }

    public Tabla getTab_tabla1() {
        return tab_tabla1;
    }

    public void setTab_tabla1(Tabla tab_tabla1) {
        this.tab_tabla1 = tab_tabla1;
    }

    public SeleccionTabla getSet_sucursal() {
        return set_sucursal;
    }

    public void setSet_sucursal(SeleccionTabla set_sucursal) {
        this.set_sucursal = set_sucursal;
    }

    public Tabla getTab_area_dep() {
        return tab_area_dep;
    }

    public void setTab_area_dep(Tabla tab_area_dep) {
        this.tab_area_dep = tab_area_dep;
    }
}
apache-2.0
hugosato/apache-axis
src/org/apache/axis/encoding/DefaultSOAP12TypeMappingImpl.java
365
package org.apache.axis.encoding; /** * @author James M Snell <jasnell@us.ibm.com> * @deprecated Please use DefaultSOAPEncodingTypeMappingImpl.java * @see org.apache.axis.encoding.DefaultSOAPEncodingTypeMappingImpl */ public class DefaultSOAP12TypeMappingImpl extends DefaultSOAPEncodingTypeMappingImpl { public DefaultSOAP12TypeMappingImpl() { } }
apache-2.0
jexp/idea2
platform/platform-api/src/com/intellij/openapi/options/ConfigurableGroup.java
914
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.options; /** * Created by IntelliJ IDEA. * User: max * Date: Dec 9, 2003 * Time: 3:21:22 PM * To change this template use Options | File Templates. */ public interface ConfigurableGroup extends Configurable.Composite { String getDisplayName(); String getShortName(); }
apache-2.0
PetrGasparik/midpoint
gui/admin-gui/src/main/java/com/evolveum/midpoint/web/component/AjaxIconButton.java
1875
/* * Copyright (c) 2010-2016 Evolveum * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.evolveum.midpoint.web.component; import org.apache.commons.lang.StringUtils; import org.apache.wicket.AttributeModifier; import org.apache.wicket.ajax.markup.html.AjaxLink; import org.apache.wicket.markup.ComponentTag; import org.apache.wicket.markup.MarkupStream; import org.apache.wicket.markup.parser.XmlTag; import org.apache.wicket.model.IModel; /** * @author Viliam Repan (lazyman) */ public abstract class AjaxIconButton extends AjaxLink<String> { public AjaxIconButton(String id, IModel<String> icon, IModel<String> title) { super(id, icon); if (title != null) { add(AttributeModifier.replace("title", title)); } } @Override public void onComponentTagBody(final MarkupStream markupStream, final ComponentTag openTag) { String icon = getModelObject(); if (StringUtils.isNotEmpty(icon)) { replaceComponentTagBody(markupStream, openTag, "<i class=\"" + icon + "\"></i>"); return; } super.onComponentTagBody(markupStream, openTag); } @Override protected void onComponentTag(ComponentTag tag) { super.onComponentTag(tag); if (tag.isOpenClose()) { tag.setType(XmlTag.TagType.OPEN); } } }
apache-2.0
Gaduo/hapi-fhir
hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/model/codesystems/MessageTransportEnumFactory.java
2666
package org.hl7.fhir.dstu3.model.codesystems; /* Copyright (c) 2011+, HL7, Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of HL7 nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ // Generated on Tue, Dec 6, 2016 09:42-0500 for FHIR v1.8.0 import org.hl7.fhir.dstu3.model.EnumFactory; public class MessageTransportEnumFactory implements EnumFactory<MessageTransport> { public MessageTransport fromCode(String codeString) throws IllegalArgumentException { if (codeString == null || "".equals(codeString)) return null; if ("http".equals(codeString)) return MessageTransport.HTTP; if ("ftp".equals(codeString)) return MessageTransport.FTP; if ("mllp".equals(codeString)) return MessageTransport.MLLP; throw new IllegalArgumentException("Unknown MessageTransport code '"+codeString+"'"); } public String toCode(MessageTransport code) { if (code == MessageTransport.HTTP) return "http"; if (code == MessageTransport.FTP) return "ftp"; if (code == MessageTransport.MLLP) return "mllp"; return "?"; } public String toSystem(MessageTransport code) { return code.getSystem(); } }
apache-2.0
vt0r/vector-android
vector/src/main/java/im/vector/store/LoginStorage.java
7093
/*
 * Copyright 2015 OpenMarket Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package im.vector.store;

import android.content.Context;
import android.content.SharedPreferences;
import android.util.Log;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import org.matrix.androidsdk.HomeserverConnectionConfig;

import java.util.ArrayList;

/**
 * Stores login credentials in SharedPreferences.
 *
 * <p>All configurations are persisted together as one JSON array under a single preference key;
 * every mutation rewrites the whole array.
 */
public class LoginStorage {
    private static final String LOG_TAG = "LoginStorage";

    private static final String PREFS_LOGIN = "Vector.LoginStorage";

    // multi accounts + home server config
    private static final String PREFS_KEY_CONNECTION_CONFIGS = "PREFS_KEY_CONNECTION_CONFIGS";

    private final Context mContext;

    public LoginStorage(Context appContext) {
        mContext = appContext.getApplicationContext();
    }

    /** The preferences file backing this store. */
    private SharedPreferences getSharedPreferences() {
        return mContext.getSharedPreferences(PREFS_LOGIN, Context.MODE_PRIVATE);
    }

    /**
     * Persists the given serialized configurations, replacing the previous value.
     * Extracted: this tail used to be duplicated in add/remove/replaceCredentials.
     *
     * @param serialized the JSON form of every configuration to keep
     */
    private void storeConfigs(ArrayList<JSONObject> serialized) {
        String ser = new JSONArray(serialized).toString();
        // Only the count is logged: the JSON contains access tokens and must stay out of logs.
        Log.d(LOG_TAG, "Storing " + serialized.size() + " credentials");
        getSharedPreferences().edit()
                .putString(PREFS_KEY_CONNECTION_CONFIGS, ser)
                .apply();
    }

    /**
     * @return the list of home server configurations.
     */
    public ArrayList<HomeserverConnectionConfig> getCredentialsList() {
        String connectionConfigsString =
                getSharedPreferences().getString(PREFS_KEY_CONNECTION_CONFIGS, null);

        // Fix: this debug line used to log an empty message. Report presence only — the raw
        // JSON holds credentials/tokens and must not be written to the log.
        Log.d(LOG_TAG, "Got connection json: " + ((connectionConfigsString != null) ? "present" : "none"));

        if (connectionConfigsString == null) {
            return new ArrayList<>();
        }

        try {
            JSONArray connectionConfigsStrings = new JSONArray(connectionConfigsString);

            ArrayList<HomeserverConnectionConfig> configList =
                    new ArrayList<>(connectionConfigsStrings.length());
            for (int i = 0; i < connectionConfigsStrings.length(); i++) {
                configList.add(
                        HomeserverConnectionConfig.fromJson(connectionConfigsStrings.getJSONObject(i)));
            }
            return configList;
        } catch (JSONException e) {
            Log.e(LOG_TAG, "Failed to deserialize accounts " + e.getMessage(), e);
            // Fix: keep the original exception as the cause instead of discarding it.
            throw new RuntimeException("Failed to deserialize accounts", e);
        }
    }

    /**
     * Add a credentials to the credentials list
     *
     * @param config the home server config to add.
     */
    public void addCredentials(HomeserverConnectionConfig config) {
        if (null != config && config.getCredentials() != null) {
            ArrayList<HomeserverConnectionConfig> configs = getCredentialsList();
            configs.add(config);

            ArrayList<JSONObject> serialized = new ArrayList<>(configs.size());
            try {
                for (HomeserverConnectionConfig c : configs) {
                    serialized.add(c.toJson());
                }
            } catch (JSONException e) {
                throw new RuntimeException("Failed to serialize connection config", e);
            }
            storeConfigs(serialized);
        }
    }

    /**
     * Remove the credentials from credentials list
     *
     * @param config the credentials to remove
     */
    public void removeCredentials(HomeserverConnectionConfig config) {
        if (null != config && config.getCredentials() != null) {
            Log.d(LOG_TAG, "Removing account: " + config.getCredentials().userId);

            ArrayList<HomeserverConnectionConfig> configs = getCredentialsList();
            ArrayList<JSONObject> serialized = new ArrayList<>(configs.size());
            boolean found = false;
            try {
                for (HomeserverConnectionConfig c : configs) {
                    if (c.getCredentials().userId.equals(config.getCredentials().userId)) {
                        found = true;
                    } else {
                        serialized.add(c.toJson());
                    }
                }
            } catch (JSONException e) {
                throw new RuntimeException("Failed to serialize connection config", e);
            }

            // No matching account: leave the stored value untouched.
            if (!found) return;

            storeConfigs(serialized);
        }
    }

    /**
     * Replace the credential from credentials list, based on credentials.userId.
     * If it does not match an existing credential it does *not* insert the new credentials.
     *
     * @param config the credentials to insert
     */
    public void replaceCredentials(HomeserverConnectionConfig config) {
        if (null != config && config.getCredentials() != null) {
            ArrayList<HomeserverConnectionConfig> configs = getCredentialsList();
            ArrayList<JSONObject> serialized = new ArrayList<>(configs.size());
            boolean found = false;
            try {
                for (HomeserverConnectionConfig c : configs) {
                    if (c.getCredentials().userId.equals(config.getCredentials().userId)) {
                        serialized.add(config.toJson());
                        found = true;
                    } else {
                        serialized.add(c.toJson());
                    }
                }
            } catch (JSONException e) {
                throw new RuntimeException("Failed to serialize connection config", e);
            }

            // No matching account: nothing to replace, per the documented contract.
            if (!found) return;

            storeConfigs(serialized);
        }
    }

    /**
     * Clear the stored values
     */
    public void clear() {
        getSharedPreferences().edit()
                .remove(PREFS_KEY_CONNECTION_CONFIGS)
                .apply();
    }
}
apache-2.0
candyam5522/eureka
eureka-common/src/main/java/edu/emory/cci/aiw/cvrg/eureka/common/entity/RoleEntity.java
4252
/* * #%L * Eureka Common * %% * Copyright (C) 2012 - 2013 Emory University * %% * This program is dual licensed under the Apache 2 and GPLv3 licenses. * * Apache License, Version 2.0: * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * GNU General Public License version 3: * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public * License along with this program. If not, see * <http://www.gnu.org/licenses/gpl-3.0.html>. * #L% */ package edu.emory.cci.aiw.cvrg.eureka.common.entity; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.SequenceGenerator; import javax.persistence.Table; import javax.persistence.Column; import org.apache.commons.lang3.builder.ToStringBuilder; /** * A bean class to hold information related to roles in the system. 
* * @author hrathod * */ @Entity @Table(name = "roles") public class RoleEntity implements org.eurekaclinical.standardapis.entity.RoleEntity { /** * The role's unique identifier. */ @Id @SequenceGenerator(name = "ROLE_SEQ_GENERATOR", sequenceName = "ROLE_SEQ", allocationSize = 1) @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "ROLE_SEQ_GENERATOR") private Long id; /** * The role's name. */ @Column(unique = true, nullable = false) private String name; /** * Is this role a default role? Default roles are assigned to all new users. */ private boolean defaultRole; /** * Get the role's identification number. * * @return A {@link Long} representing the role's id. */ @Override public Long getId() { return this.id; } /** * Set the role's identification number. * * @param inId The number representing the role's id. */ @Override public void setId(Long inId) { this.id = inId; } /** * Get the role's name. * * @return A String containing the role's name. */ @Override public String getName() { return this.name; } /** * Set the role's name. * * @param inName A string containing the role's name. */ @Override public void setName(String inName) { this.name = inName; } /** * Is this role a default role? * * @return True if the role is a default role, false otherwise. */ @Override public boolean isDefaultRole() { return this.defaultRole; } /** * Set the role's default flag. * * @param inDefaultRole True or False, True indicating a default role, False * indicating a non-default role. 
*/ @Override public void setDefaultRole(boolean inDefaultRole) { this.defaultRole = inDefaultRole; } @Override public String toString() { return ToStringBuilder.reflectionToString(this); } @Override public boolean equals(Object o) { if (this == o) return true; if (!(o instanceof RoleEntity)) return false; RoleEntity role = (RoleEntity) o; if (defaultRole != role.defaultRole) return false; if (!id.equals(role.id)) return false; if (!name.equals(role.name)) return false; return true; } @Override public int hashCode() { int result = id.hashCode(); result = 31 * result + name.hashCode(); result = 31 * result + (defaultRole ? 1 : 0); return result; } }
apache-2.0
jacksonokuhn/dataverse
src/main/java/edu/harvard/iq/dataverse/FileMetadata.java
16763
package edu.harvard.iq.dataverse; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.annotations.Expose; import java.io.Serializable; import java.sql.Timestamp; import java.text.DateFormat; import java.util.ArrayList; import java.util.Collection; import java.util.Comparator; import java.util.Date; import java.util.LinkedList; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; import javax.json.Json; import javax.json.JsonArrayBuilder; import javax.persistence.CascadeType; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.Index; import javax.persistence.JoinColumn; import javax.persistence.JoinTable; import javax.persistence.ManyToMany; import javax.persistence.ManyToOne; import javax.persistence.OrderBy; import javax.persistence.Table; import javax.persistence.Transient; import javax.persistence.Version; import org.hibernate.validator.constraints.NotBlank; import javax.validation.constraints.Pattern; /** * * @author skraffmiller */ @Table(indexes = {@Index(columnList="datafile_id"), @Index(columnList="datasetversion_id")} ) @Entity public class FileMetadata implements Serializable { private static final long serialVersionUID = 1L; private static final DateFormat displayDateFormat = DateFormat.getDateInstance(DateFormat.MEDIUM); private static final Logger logger = Logger.getLogger(FileMetadata.class.getCanonicalName()); @Expose @Pattern(regexp="^[^:<>;#/\"\\*\\|\\?\\\\]*$", message = "File Name cannot contain any of the following characters: \\ / : * ? 
\" < > | ; # .") @NotBlank(message = "Please specify a file name.") @Column( nullable=false ) private String label = ""; @Pattern(regexp="|[^/\\\\]|^[^/\\\\]+.*[^/\\\\]+$", message = "Directory Name cannot contain leading or trailing file separators.") @Expose @Column ( nullable=true ) private String directoryLabel; @Column(columnDefinition = "TEXT") private String description = ""; @Expose private boolean restricted; @ManyToOne @JoinColumn(nullable=false) private DatasetVersion datasetVersion; @ManyToOne @JoinColumn(nullable=false) private DataFile dataFile; /** * Creates a copy of {@code this}, with identical business logic fields. * E.g., {@link #label} would be duplicated; {@link #version} will not. * * @return A copy of {@code this}, except for the DB-related data. */ public FileMetadata createCopy() { FileMetadata fmd = new FileMetadata(); fmd.setCategories(new LinkedList<>(getCategories()) ); fmd.setDataFile( getDataFile() ); fmd.setDatasetVersion( getDatasetVersion() ); fmd.setDescription( getDescription() ); fmd.setLabel( getLabel() ); fmd.setRestricted( isRestricted() ); return fmd; } public String getLabel() { return label; } public void setLabel(String label) { this.label = label; } public String getDirectoryLabel() { return directoryLabel; } public void setDirectoryLabel(String directoryLabel) { this.directoryLabel = directoryLabel; } public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } public boolean isRestricted() { return restricted; } public void setRestricted(boolean restricted) { this.restricted = restricted; } /* * File Categories to which this version of the DataFile belongs: */ @ManyToMany @JoinTable(indexes = {@Index(columnList="filecategories_id"),@Index(columnList="filemetadatas_id")}) @OrderBy("name") private List<DataFileCategory> fileCategories; public List<DataFileCategory> getCategories() { return fileCategories; } public void 
setCategories(List<DataFileCategory> fileCategories) { this.fileCategories = fileCategories; } public void addCategory(DataFileCategory category) { if (fileCategories == null) { fileCategories = new ArrayList<>(); } fileCategories.add(category); } /** * Retrieve categories * @return */ public List<String> getCategoriesByName() { ArrayList<String> ret = new ArrayList<>(); if (fileCategories == null) { return ret; } for (int idx=0; idx < fileCategories.size(); idx++){ ret.add(fileCategories.get(idx).getName()); } // fileCategories.stream() // .map(x -> ret.add(x.getName())); return ret; } public JsonArrayBuilder getCategoryNamesAsJsonArrayBuilder() { JsonArrayBuilder builder = Json.createArrayBuilder(); if (fileCategories == null) { return builder; } for (int idx=0; idx < fileCategories.size(); idx++){ builder.add(fileCategories.get(idx).getName()); } //fileCategories.stream() // .map(x -> builder.add(x.getName())); return builder; } // alternative, experimental method: public void setCategoriesByName(List<String> newCategoryNames) { setCategories(null); // ?? TODO: investigate! if (newCategoryNames != null) { for (int i = 0; i < newCategoryNames.size(); i++) { // Dataset.getCategoryByName() will check if such a category // already exists for the parent dataset; it will be created // if not. The method will return null if the supplied // category name is null or empty. -- L.A. 4.0 beta 10 DataFileCategory fileCategory = null; try { // Using "try {}" to catch any null pointer exceptions, // just in case: fileCategory = this.getDatasetVersion().getDataset().getCategoryByName(newCategoryNames.get(i)); } catch (Exception ex) { fileCategory = null; } if (fileCategory != null) { this.addCategory(fileCategory); fileCategory.addFileMetadata(this); } } } } /* note that this version only *adds* new categories, but does not remove the ones that has been unchecked! 
public void setCategoriesByName(List<String> newCategoryNames) { if (newCategoryNames != null) { Collection<String> oldCategoryNames = getCategoriesByName(); for (int i = 0; i < newCategoryNames.size(); i++) { if (!oldCategoryNames.contains(newCategoryNames.get(i))) { // Dataset.getCategoryByName() will check if such a category // already exists for the parent dataset; it will be created // if not. The method will return null if the supplied // category name is null or empty. -- L.A. 4.0 beta 10 DataFileCategory fileCategory = null; try { // Using "try {}" to catch any null pointer exceptions, // just in case: fileCategory = this.getDatasetVersion().getDataset().getCategoryByName(newCategoryNames.get(i)); } catch (Exception ex) { fileCategory = null; } if (fileCategory != null) { this.addCategory(fileCategory); fileCategory.addFileMetadata(this); } } } } } */ public void addCategoryByName(String newCategoryName) { if (newCategoryName != null && !newCategoryName.equals("")) { Collection<String> oldCategoryNames = getCategoriesByName(); if (!oldCategoryNames.contains(newCategoryName)) { DataFileCategory fileCategory = null; // Dataset.getCategoryByName() will check if such a category // already exists for the parent dataset; it will be created // if not. The method will return null if the supplied // category name is null or empty. -- L.A. 4.0 beta 10 try { // Using "try {}" to catch any null pointer exceptions, // just in case: fileCategory = this.getDatasetVersion().getDataset().getCategoryByName(newCategoryName); } catch (Exception ex) { fileCategory = null; } if (fileCategory != null) { logger.log(Level.FINE, "Found file category for {0}", newCategoryName); this.addCategory(fileCategory); fileCategory.addFileMetadata(this); } else { logger.log(Level.INFO, "Could not find file category for {0}", newCategoryName); } } else { // don't do anything - this file metadata already belongs to // this category. 
} } } public String getFileDateToDisplay() { Date fileDate = null; DataFile datafile = this.getDataFile(); if (datafile != null) { boolean fileHasBeenReleased = datafile.isReleased(); if (fileHasBeenReleased) { Timestamp filePublicationTimestamp = datafile.getPublicationDate(); if (filePublicationTimestamp != null) { fileDate = filePublicationTimestamp; } } else { Timestamp fileCreateTimestamp = datafile.getCreateDate(); if (fileCreateTimestamp != null) { fileDate = fileCreateTimestamp; } } } if (fileDate != null) { return displayDateFormat.format(fileDate); } return ""; } public String getFileCitation(){ return getFileCitation(false); } public String getFileCitation(boolean html){ String citation = this.getDatasetVersion().getCitation(html); /* ", #{FilePage.fileMetadata.label} [fileName]" <h:outputText value=", #{FilePage.file.unf}" rendered="#{FilePage.file.tabularData and !(empty FilePage.file.unf)}"/> */ citation += "; " + this.getLabel() + " [fileName]" ; if (this.dataFile.isTabularData() && this.dataFile.getUnf() != null && !this.dataFile.getUnf().isEmpty()){ citation += ", " + this.dataFile.getUnf() + " [fileUNF]"; } return citation; } public DatasetVersion getDatasetVersion() { return datasetVersion; } public void setDatasetVersion(DatasetVersion datasetVersion) { this.datasetVersion = datasetVersion; } public DataFile getDataFile() { return dataFile; } public void setDataFile(DataFile dataFile) { this.dataFile = dataFile; } @Id @GeneratedValue(strategy = GenerationType.IDENTITY) private Long id; /** * Getter for property id. * @return Value of property id. */ public Long getId() { return this.id; } /** * Setter for property id. * @param id New value of property id. */ public void setId(Long id) { this.id = id; } @Version private Long version; /** * Getter for property version. * @return Value of property version. */ public Long getVersion() { return this.version; } /** * Setter for property version. * @param version New value of property version. 
*/ public void setVersion(Long version) { this.version = version; } @Transient private boolean selected; public boolean isSelected() { return selected; } public void setSelected(boolean selected) { this.selected = selected; } @Transient private boolean restrictedUI; public boolean isRestrictedUI() { return restrictedUI; } public void setRestrictedUI(boolean restrictedUI) { this.restrictedUI = restrictedUI; } @Transient private FileVersionDifference fileVersionDifference ; public FileVersionDifference getFileVersionDifference() { return fileVersionDifference; } public void setFileVersionDifference(FileVersionDifference fileVersionDifference) { this.fileVersionDifference = fileVersionDifference; } @Transient private String contributorNames; public String getContributorNames() { return contributorNames; } public void setContributorNames(String contributorNames) { this.contributorNames = contributorNames; } @Override public int hashCode() { int hash = 0; hash += (id != null ? id.hashCode() : 0); return hash; } @Override public boolean equals(Object object) { if (!(object instanceof FileMetadata)) { return false; } FileMetadata other = (FileMetadata) object; return !((this.id == null && other.id != null) || (this.id != null && !this.id.equals(other.id))); } /* * An experimental method for comparing 2 file metadatas *by content*; i.e., * this would be for checking 2 metadatas from 2 different versions, to * determine if any of the actual metadata fields have changed between * versions. 
*/ public boolean contentEquals(FileMetadata other) { if (other == null) { return false; } if (this.getLabel() != null) { if (!this.getLabel().equals(other.getLabel())) { return false; } } else if (other.getLabel() != null) { return false; } if (this.getDirectoryLabel() != null) { if (!this.getDirectoryLabel().equals(other.getDirectoryLabel())) { return false; } } else if (other.getDirectoryLabel() != null) { return false; } if (this.getDescription() != null) { if (!this.getDescription().equals(other.getDescription())) { return false; } } else if (other.getDescription() != null) { return false; } return true; } @Override public String toString() { return "edu.harvard.iq.dvn.core.study.FileMetadata[id=" + id + "]"; } public static final Comparator<FileMetadata> compareByLabel = new Comparator<FileMetadata>() { @Override public int compare(FileMetadata o1, FileMetadata o2) { return o1.getLabel().toUpperCase().compareTo(o2.getLabel().toUpperCase()); } }; public String toPrettyJSON(){ return serializeAsJSON(true); } public String toJSON(){ return serializeAsJSON(false); } /** * * @param prettyPrint * @return */ private String serializeAsJSON(boolean prettyPrint){ JsonObject jsonObj = asGsonObject(prettyPrint); return jsonObj.toString(); } public JsonObject asGsonObject(boolean prettyPrint){ GsonBuilder builder; if (prettyPrint){ // Add pretty printing builder = new GsonBuilder().excludeFieldsWithoutExposeAnnotation().setPrettyPrinting(); }else{ builder = new GsonBuilder().excludeFieldsWithoutExposeAnnotation(); } builder.serializeNulls(); // correctly capture nulls Gson gson = builder.create(); // serialize this object JsonElement jsonObj = gson.toJsonTree(this); jsonObj.getAsJsonObject().addProperty("id", this.getId()); return jsonObj.getAsJsonObject(); } }
apache-2.0
apache/solr
solr/core/src/test/org/apache/solr/cloud/OverseerSolrResponseUnsafeSerializationTest.java
2550
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.cloud; import org.junit.AfterClass; import org.junit.BeforeClass; public class OverseerSolrResponseUnsafeSerializationTest extends OverseerSolrResponseTest { @BeforeClass public static void setUpClass() { System.setProperty("solr.useUnsafeOverseerResponse", "true"); } @AfterClass public static void tearDownClass() { System.clearProperty("solr.useUnsafeOverseerResponse"); } public void testUnsafeSerializartionToggles() { assertToggles("true", true, true); assertToggles("deserialization", false, true); assertToggles(null, false, false); // By default, don't use unsafe assertToggles("foo", false, false); assertToggles("false", false, false); assertToggles("serialization", false, false); // This is not an option } private void assertToggles( String propertyValue, boolean serializationEnabled, boolean deserializationEnabled) { String previousValue = System.getProperty("solr.useUnsafeOverseerResponse"); try { if (propertyValue == null) { System.clearProperty("solr.useUnsafeOverseerResponse"); } else { System.setProperty("solr.useUnsafeOverseerResponse", propertyValue); } assertEquals( "Unexpected serialization toggle for value: " + propertyValue, serializationEnabled, 
OverseerSolrResponseSerializer.useUnsafeSerialization()); assertEquals( "Unexpected serialization toggle for value: " + propertyValue, deserializationEnabled, OverseerSolrResponseSerializer.useUnsafeDeserialization()); } finally { if (previousValue != null) { System.setProperty("solr.useUnsafeOverseerResponse", previousValue); } } } }
apache-2.0
barnyard/p2p-core
src/test/java/com/bt/pi/core/cli/commands/KoalaNodeCommandTest.java
880
package com.bt.pi.core.cli.commands; import static org.junit.Assert.assertEquals; import java.io.PrintStream; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; import com.bt.pi.core.cli.commands.KoalaNodeCommand; import com.bt.pi.core.node.KoalaNode; public class KoalaNodeCommandTest { private KoalaNodeCommand koalaNodeCommand; private KoalaNode koalaNode; @Before public void before() { koalaNode = Mockito.mock(KoalaNode.class); this.koalaNodeCommand = new KoalaNodeCommand() { public void execute(PrintStream outputStream) { } public String getDescription() { return "desc"; } public String getKeyword() { return "test"; } }; } @Test public void testGetterAndSetter() { // act koalaNodeCommand.setKoalaNode(koalaNode); // assert assertEquals(koalaNode, koalaNodeCommand.getKoalaNode()); } }
apache-2.0
akon-dey/ground
ground-core/src/test/java/edu/berkeley/ground/api/versions/neo4j/Neo4jVersionHistoryDAGFactoryTest.java
1104
package edu.berkeley.ground.api.versions.neo4j; import org.junit.Test; import edu.berkeley.ground.api.Neo4jTest; import edu.berkeley.ground.api.versions.VersionHistoryDAG; import edu.berkeley.ground.api.versions.VersionSuccessor; import edu.berkeley.ground.db.Neo4jClient.Neo4jConnection; import edu.berkeley.ground.exceptions.GroundException; import static org.junit.Assert.*; public class Neo4jVersionHistoryDAGFactoryTest extends Neo4jTest { public Neo4jVersionHistoryDAGFactoryTest() throws GroundException { super(); } @Test public void testVersionHistoryDAGCreation() throws GroundException { Neo4jConnection connection = null; try { String testId = "Nodes.test"; super.versionHistoryDAGFactory.create(testId); connection = super.neo4jClient.getConnection(); VersionHistoryDAG<?> dag = super.versionHistoryDAGFactory.retrieveFromDatabase(connection, testId); assertEquals(0, dag.getEdgeIds().size()); } finally { connection.abort(); } } }
apache-2.0
nikhilvibhav/camel
components/camel-jetty/src/main/java/org/apache/camel/component/jetty9/AttachmentHttpBinding.java
6370
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.jetty9;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Collection;
import java.util.Enumeration;
import java.util.Map;

import javax.activation.DataHandler;
import javax.activation.DataSource;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.Part;

import org.apache.camel.Exchange;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.attachment.Attachment;
import org.apache.camel.attachment.AttachmentMessage;
import org.apache.camel.attachment.DefaultAttachment;
import org.apache.camel.component.jetty.MultiPartFilter;
import org.apache.camel.http.common.DefaultHttpBinding;
import org.apache.camel.http.common.HttpHelper;
import org.apache.camel.http.common.HttpMessage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * To handle attachments with Jetty 9.
 * <p/>
 * This implementation is needed to deal with attachments when using Jetty 9.
 */
final class AttachmentHttpBinding extends DefaultHttpBinding {
    private static final Logger LOG = LoggerFactory.getLogger(AttachmentHttpBinding.class);

    AttachmentHttpBinding() {
    }

    /**
     * Converts each servlet multipart {@link Part} into a Camel attachment on
     * the in-message, and additionally maps the part's submitted file name to
     * a message header (subject to the header filter strategy).
     * Only runs when MultiPartFilter flagged the request as multipart.
     */
    @Override
    protected void populateAttachments(HttpServletRequest request, HttpMessage message) {
        // Set by MultiPartFilter when the request body was multipart/form-data.
        Boolean object = (Boolean) request.getAttribute(MultiPartFilter.MULTIPART);
        if (object != null && object) {
            Collection<Part> parts;
            try {
                parts = request.getParts();
                for (Part part : parts) {
                    DataSource ds = new PartDataSource(part);
                    Attachment attachment = new DefaultAttachment(ds);
                    // Preserve all part headers (Content-Disposition etc.) on the attachment.
                    for (String headerName : part.getHeaderNames()) {
                        for (String headerValue : part.getHeaders(headerName)) {
                            attachment.addHeader(headerName, headerValue);
                        }
                    }
                    // Attachments are keyed by the form field name.
                    AttachmentMessage am = message.getExchange().getMessage(AttachmentMessage.class);
                    am.addAttachmentObject(part.getName(), attachment);

                    // NOTE(review): header is keyed by the *submitted file name*,
                    // but the attachment lookup uses that same name as the key —
                    // presumably only meaningful when field name == file name;
                    // confirm against MultiPartFilter behavior.
                    String name = part.getSubmittedFileName();
                    Object value = am.getAttachment(name);
                    Map<String, Object> headers = message.getHeaders();
                    if (getHeaderFilterStrategy() != null
                            && !getHeaderFilterStrategy().applyFilterToExternalHeaders(name, value, message.getExchange())
                            && name != null) {
                        HttpHelper.appendHeader(headers, name, value);
                    }
                }
            } catch (Exception e) {
                throw new RuntimeCamelException("Cannot populate attachments", e);
            }
        }
    }

    /**
     * Copies HTTP request parameters into message headers. Parameters whose
     * name matches an attachment expose the attachment's DataHandler (or the
     * plain text parameter value for text/plain content) instead of the raw
     * parameter string.
     */
    @Override
    protected void populateRequestParameters(HttpServletRequest request, HttpMessage message) throws Exception {
        // we populate the http request parameters without checking the request
        // method
        Map<String, Object> headers = message.getHeaders();

        // remove Content-Encoding from request
        if (request instanceof org.eclipse.jetty.server.Request) {
            org.eclipse.jetty.server.Request jettyRequest = (org.eclipse.jetty.server.Request) request;
            jettyRequest.getHttpFields().remove(Exchange.CONTENT_ENCODING);
        }

        // attachment is optional
        AttachmentMessage am = message.getExchange().getMessage(AttachmentMessage.class);

        Enumeration<?> names = request.getParameterNames();
        while (names.hasMoreElements()) {
            String name = (String) names.nextElement();
            if (am != null && am.getAttachment(name) != null) {
                // parameter name collides with an attachment: prefer the
                // attachment's DataHandler as the header value
                DataHandler dh = am.getAttachment(name);
                Object value = dh;
                // for plain text (or unknown) content use the parameter string instead
                if (dh.getContentType() == null || dh.getContentType().startsWith("text/plain")) {
                    value = request.getParameter(name);
                }
                if (getHeaderFilterStrategy() != null
                        && !getHeaderFilterStrategy().applyFilterToExternalHeaders(name, value, message.getExchange())) {
                    HttpHelper.appendHeader(headers, name, value);
                }
                continue;
            }

            // there may be multiple values for the same name
            String[] values = request.getParameterValues(name);
            LOG.trace("HTTP parameter {} = {}", name, values);

            if (values != null) {
                for (String value : values) {
                    if (getHeaderFilterStrategy() != null
                            && !getHeaderFilterStrategy().applyFilterToExternalHeaders(name, value, message.getExchange())) {
                        HttpHelper.appendHeader(headers, name, value);
                    }
                }
            }
        }
    }

    /**
     * Read-only {@link DataSource} view over a servlet multipart {@link Part}.
     * Writing is unsupported: {@link #getOutputStream()} returns null.
     */
    final class PartDataSource implements DataSource {
        private final Part part;

        PartDataSource(Part part) {
            this.part = part;
        }

        @Override
        public OutputStream getOutputStream() throws IOException {
            // read-only source
            return null;
        }

        @Override
        public String getName() {
            return part.getName();
        }

        @Override
        public InputStream getInputStream() throws IOException {
            return part.getInputStream();
        }

        @Override
        public String getContentType() {
            return part.getContentType();
        }
    }
}
apache-2.0
y1011/cas-server
cas-server-core-authentication/src/main/java/org/jasig/cas/authentication/SuccessfulHandlerMetaDataPopulator.java
1429
package org.jasig.cas.authentication; import org.springframework.stereotype.Component; import java.util.HashSet; import java.util.Set; /** * Sets an authentication attribute containing the collection of authentication handlers (by name) that successfully * authenticated credential. The attribute name is given by {@link #SUCCESSFUL_AUTHENTICATION_HANDLERS}. * This component provides a simple method to inject successful handlers into the CAS ticket validation * response to support level of assurance and MFA use cases. * * @author Marvin S. Addison * @author Alaa Nassef * @since 4.0.0 */ @Component("successfulHandlerMetaDataPopulator") public class SuccessfulHandlerMetaDataPopulator implements AuthenticationMetaDataPopulator { /** Attribute name containing collection of handler names that successfully authenticated credential. */ public static final String SUCCESSFUL_AUTHENTICATION_HANDLERS = "successfulAuthenticationHandlers"; @Override public void populateAttributes(final AuthenticationBuilder builder, final Credential credential) { Set<String> successes = builder.getSuccesses().keySet(); if (successes != null) { successes = new HashSet(successes); } builder.addAttribute(SUCCESSFUL_AUTHENTICATION_HANDLERS, successes); } @Override public boolean supports(final Credential credential) { return true; } }
apache-2.0
megoesrawr/xerial.storage
src/main/java/org/xerial/db/btree/BTreePageHeader.java
2779
/*--------------------------------------------------------------------------
 *  Copyright 2007 Taro L. Saito
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *--------------------------------------------------------------------------*/
//--------------------------------------
// XerialJ
//
// PageHeader.java
// Since: Aug 28, 2007 10:05:17 AM
//
// $URL:http://www.xerial.org/svn/project/XerialJ/trunk/XerialJ/src/main/java/org/xerial/db/btree/BTreePageHeader.java $
// $Author:leo $
//--------------------------------------
package org.xerial.db.btree;

import org.xerial.db.CommonPageHeader;
import org.xerial.db.PageHeader;
import org.xerial.db.cache.Buffer;
import org.xerial.db.cache.BufferReader;
import org.xerial.db.cache.BufferWriter;
import org.xerial.db.datatype.TypeInformation;

/**
 * Header of the page.
 *
 * Wraps a {@link CommonPageHeader} (which owns the page ID) and adds two
 * B-tree-specific fields: the number of entries stored in the page and a
 * dirty flag. The on-disk layout is: common header, then entry count (int),
 * then dirty flag (boolean) — save() and load() must stay in this order.
 *
 * @author leo
 */
public class BTreePageHeader implements PageHeader
{
    // Delegate holding the common (non-B-tree) header fields, e.g. the page ID.
    private final CommonPageHeader defaultHeader = new CommonPageHeader();
    // Number of entries currently stored in this page; never negative.
    private int numEntries = 0;
    // True when the in-memory page differs from its on-disk image.
    private boolean isDirty = false;

    public BTreePageHeader()
    {}

    public long getPageID()
    {
        return defaultHeader.getPageID();
    }

    public void setPageID(long pageID)
    {
        defaultHeader.setPageID(pageID);
    }

    public int getNumEntries()
    {
        return numEntries;
    }

    /**
     * Sets the entry count.
     *
     * @param numEntries the new count; must be non-negative
     * @throws IllegalArgumentException when numEntries is negative
     */
    public void setNumEntries(int numEntries)
    {
        if(numEntries < 0)
            throw new IllegalArgumentException("numEntries cannot be less than 0: " + numEntries);
        this.numEntries = numEntries;
    }

    public boolean isDirty()
    {
        return isDirty;
    }

    public void setDirty(boolean isDirty)
    {
        this.isDirty = isDirty;
    }

    /**
     * Serializes this header: common header first, then numEntries, then the
     * dirty flag. Must mirror {@link #load(BufferReader)} exactly.
     */
    public void save(BufferWriter writer)
    {
        defaultHeader.save(writer);
        writer.writeInt(numEntries);
        writer.writeBoolean(isDirty);
    }

    /**
     * Deserializes this header in the same field order as
     * {@link #save(BufferWriter)}.
     */
    public void load(BufferReader reader)
    {
        defaultHeader.load(reader);
        numEntries = reader.readInt();
        isDirty = reader.readBoolean();
    }

    /**
     * @return total serialized size in bytes: the common header's size plus
     *         one int (numEntries) and one boolean (isDirty).
     */
    public int getHeaderSize()
    {
        final int size = TypeInformation.INT_SIZE + TypeInformation.BOOLEAN_SIZE;
        return defaultHeader.getHeaderSize() + size;
    }
}
apache-2.0
vkmc/minijava
SyntacticAnalyzer/src/SyntacticAnalyzer/Tokenizer.java
19624
package SyntacticAnalyzer;

import java.util.HashSet;

/**
 * Lexical analyzer. Tokenizes MiniJava source code by running a hand-written
 * finite-state machine over the character stream supplied by an
 * {@code InputReader}.
 *
 * <p>State overview (see {@link #getToken()}):
 * 0 = start, 1 = identifier/keyword, 2 = integer literal,
 * 3/31/32 = character literal, 4/41 = string literal,
 * 5-8 = relational/assignment operators, 9/10 = logical operators,
 * 11 = division or comment start.
 *
 * @author Ramiro Agis
 * @author Victoria Martínez de la Cruz
 */
public class Tokenizer {

    // Current source line (1-based) and current DFA state.
    private int lineNumber, currentState;
    // Character most recently read from the input.
    private char currentChar;
    // Character source; also supports one-character backtracking via resetMark().
    private InputReader reader;
    // Reserved words of MiniJava, filled once by populateKeywords().
    private HashSet<String> keywords;

    /**
     * Creates a tokenizer for the given source file.
     *
     * Initializes the line counter, the DFA state and the keyword set,
     * and opens the input reader.
     *
     * @param filename path of the source-code file
     */
    public Tokenizer(String filename) {
        this.currentState = 0;
        this.lineNumber = 1;
        keywords = new HashSet<>();
        populateKeywords();
        reader = new InputReader(filename);
    }

    /**
     * Tokenization step: lexical analysis of the source file.
     *
     * Returns one token per call, scanning the file sequentially. Whitespace
     * and comments are consumed silently; end of file yields an "EOF" token.
     *
     * @return the next token found
     * @throws LexicalException on any malformed lexeme or unsupported character
     */
    public Token getToken() throws LexicalException {
        StringBuilder lexeme = new StringBuilder();
        // True when an integer literal started with '0' (leading zeros are illegal).
        boolean flagZero = false;
        this.currentState = 0;

        while (true) {
            currentChar = (char) reader.readChar();
            switch (currentState) {
                // State 0: start state — classify the first character of the lexeme.
                case 0: {
                    if (currentChar == '_' || isASCIILetter(currentChar)) {
                        currentState = 1;
                        lexeme.append(currentChar);
                    } else if (Character.isDigit(currentChar)) {
                        if (currentChar == '0') {
                            flagZero = true;
                        }
                        currentState = 2;
                        lexeme.append(currentChar);
                    } else {
                        switch (currentChar) {
                            case ' ':
                                break;
                            case '\t':
                                break;
                            case '\n':
                                lineNumber++;
                                break;
                            case '\'':
                                currentState = 3;
                                lexeme.append(currentChar);
                                break;
                            case '"':
                                currentState = 4;
                                lexeme.append(currentChar);
                                break;
                            case '>':
                                currentState = 5;
                                lexeme.append(currentChar);
                                break;
                            case '<':
                                currentState = 6;
                                lexeme.append(currentChar);
                                break;
                            case '=':
                                currentState = 7;
                                lexeme.append(currentChar);
                                break;
                            case '!':
                                currentState = 8;
                                lexeme.append(currentChar);
                                break;
                            case '&':
                                currentState = 9;
                                lexeme.append(currentChar);
                                break;
                            case '|':
                                currentState = 10;
                                lexeme.append(currentChar);
                                break;
                            case '/':
                                // Note: '/' is not appended; state 11 decides comment vs. division.
                                currentState = 11;
                                break;
                            // Single-character tokens are returned immediately.
                            case '+':
                                return new Token("+", "+", lineNumber);
                            case '-':
                                return new Token("-", "-", lineNumber);
                            case '*':
                                return new Token("*", "*", lineNumber);
                            case '(':
                                return new Token("(", "(", lineNumber);
                            case ')':
                                return new Token(")", ")", lineNumber);
                            case '{':
                                return new Token("{", "{", lineNumber);
                            case '}':
                                return new Token("}", "}", lineNumber);
                            case ';':
                                return new Token(";", ";", lineNumber);
                            case ',':
                                return new Token(",", ",", lineNumber);
                            case '.':
                                return new Token(".", ".", lineNumber);
                            case '%':
                                return new Token("%", "%", lineNumber);
                            case '\0':
                                // End of file: undo the newline count bump and emit EOF.
                                lineNumber--;
                                return new Token("EOF", "\\0", lineNumber);
                            default:
                                throw new LexicalException("Linea: " + lineNumber + " - Error lexico: Caracter no soportado (" + currentChar + ").");
                        }
                    }
                    break;
                }
                // State 1: inside an identifier or keyword.
                case 1:
                    if (isASCIILetter(currentChar) || Character.isDigit(currentChar) || currentChar == '_') {
                        lexeme.append(currentChar);
                    } else {
                        Token token;
                        String lexemeString = lexeme.toString();
                        if (keywords.contains(lexemeString)) {
                            // It is a keyword.
                            token = new Token(lexemeString, lexemeString, lineNumber);
                        } else {
                            // It is an identifier.
                            token = new Token("id", lexemeString, lineNumber);
                        }
                        checkNL(currentChar);
                        return token;
                    }
                    break;
                // State 2: inside an integer literal.
                case 2:
                    if (isASCIILetter(currentChar)) {
                        throw new LexicalException("Linea: " + lineNumber + " - Error lexico: Número mal formado (" + lexeme + currentChar + ").");
                    } else if (Character.isDigit(currentChar)) {
                        if (flagZero) {
                            // A number may not have a leading zero.
                            throw new LexicalException("Linea: " + lineNumber + " - Error lexico: Número mal formado. Un número no puede empezar con 0 (" + lexeme + currentChar + ").");
                        } else {
                            lexeme.append(currentChar);
                        }
                    } else {
                        Token token = new Token("intLiteral", lexeme.toString(), lineNumber);
                        checkNL(currentChar);
                        return token;
                    }
                    break;
                // State 3: just after the opening quote of a character literal.
                case 3:
                    if (currentChar != '\\' && currentChar != '\'' && currentChar != '\n') {
                        if (!isValidChar(currentChar)) {
                            throw new LexicalException("Linea: " + lineNumber + " - Error lexico: Caracter no soportado (" + currentChar + ").");
                        }
                        lexeme.append(currentChar);
                        currentState = 31;
                        break;
                    } else if (currentChar == '\\') {
                        // Escape sequence: consume the backslash and read the escaped char in state 32.
                        lexeme.append(currentChar);
                        currentState = 32;
                        break;
                    } else if (currentChar == '\'') {
                        throw new LexicalException("Linea: " + lineNumber + " - Error lexico: Caracter vacio (" + lexeme + currentChar + ").");
                    } else {
                        throw new LexicalException("Linea: " + lineNumber + " - Error lexico: Caracter mal formado (" + lexeme + currentChar + ").");
                    }
                // State 31: character consumed; the closing quote must follow.
                case 31:
                    if (currentChar == '\'') {
                        lexeme.append(currentChar);
                        String lexemeString = lexeme.toString();
                        return new Token("charLiteral", lexemeString, lineNumber);
                    } else {
                        throw new LexicalException("Linea: " + lineNumber + " - Error lexico: Caracter mal formado (" + lexeme + currentChar + ").");
                    }
                // State 32: character following a backslash in a char literal.
                case 32:
                    if (currentChar != '\n' && currentChar != '\t' && currentChar != '\0') {
                        if (!isValidChar(currentChar)) {
                            throw new LexicalException("Linea: " + lineNumber + " - Error lexico: Caracter no soportado (" + currentChar + ").");
                        }
                        lexeme.append(currentChar);
                        currentState = 31;
                        break;
                    } else {
                        throw new LexicalException("Linea: " + lineNumber + " - Error lexico: Caracter no soportado (" + currentChar + ").");
                    }
                // State 4: just after the opening quote of a string literal.
                case 4:
                    if (currentChar != '\n' && currentChar != '"') {
                        if (!isValidChar(currentChar)) {
                            throw new LexicalException("Linea: " + lineNumber + " - Error lexico: Caracter no soportado (" + currentChar + ").");
                        }
                        lexeme.append(currentChar);
                        currentState = 41;
                        break;
                    } else if (currentChar == '"') {
                        // Empty string literal.
                        lexeme.append(currentChar);
                        String lexemeString = lexeme.toString();
                        return new Token("stringLiteral", lexemeString, lineNumber);
                    } else {
                        throw new LexicalException("Linea: " + lineNumber + " - Error lexico: Cadena mal formada (" + lexeme + currentChar + ").");
                    }
                // State 41: inside the body of a string literal.
                case 41:
                    if (currentChar != '\n' && currentChar != '"') {
                        if (!isValidChar(currentChar)) {
                            throw new LexicalException("Linea: " + lineNumber + " - Error lexico: Caracter no soportado (" + currentChar + ").");
                        }
                        lexeme.append(currentChar);
                        break;
                    } else if (currentChar == '"') {
                        lexeme.append(currentChar);
                        String lexemeString = lexeme.toString();
                        return new Token("stringLiteral", lexemeString, lineNumber);
                    } else {
                        throw new LexicalException("Linea: " + lineNumber + " - Error lexico: Cadena mal formada (" + lexeme + currentChar + ").");
                    }
                // State 5: saw '>' — may become ">=".
                case 5:
                    if (currentChar == '=') {
                        return new Token(">=", ">=", lineNumber);
                    } else {
                        Token token = new Token(">", ">", lineNumber);
                        checkNL(currentChar);
                        return token;
                    }
                // State 6: saw '<' — may become "<=".
                case 6:
                    if (currentChar == '=') {
                        return new Token("<=", "<=", lineNumber);
                    } else {
                        Token token = new Token("<", "<", lineNumber);
                        checkNL(currentChar);
                        return token;
                    }
                // State 7: saw '=' — may become "==".
                case 7:
                    if (currentChar == '=') {
                        return new Token("==", "==", lineNumber);
                    } else {
                        Token token = new Token("=", "=", lineNumber);
                        checkNL(currentChar);
                        return token;
                    }
                // State 8: saw '!' — may become "!=".
                case 8:
                    if (currentChar == '=') {
                        return new Token("!=", "!=", lineNumber);
                    } else {
                        Token token = new Token("!", "!", lineNumber);
                        checkNL(currentChar);
                        return token;
                    }
                // State 9: saw '&' — only "&&" is a valid operator.
                case 9:
                    if (currentChar == '&') {
                        return new Token("&&", "&&", lineNumber);
                    } else {
                        throw new LexicalException("Linea: " + lineNumber + " - Error lexico: Operador no soportado (" + lexeme + currentChar + ").");
                    }
                // State 10: saw '|' — only "||" is a valid operator.
                case 10:
                    if (currentChar == '|') {
                        return new Token("||", "||", lineNumber);
                    } else {
                        throw new LexicalException("Linea: " + lineNumber + " - Error lexico: Operador no soportado (" + lexeme + currentChar + ").");
                    }
                // State 11: saw '/' — line comment, block comment, or division.
                case 11:
                    if (currentChar == '/') {
                        proccessComment(); // S11.1
                        currentState = 0;
                        break;
                    } else if (currentChar == '*') {
                        processBlockComment(); // S11.2
                        currentState = 0;
                        break;
                    } else {
                        Token token = new Token("/", "/", lineNumber);
                        checkNL(currentChar);
                        return token;
                    }
            }
        }
    }

    // Data-structure initialization

    /**
     * MiniJava reserved words.
     *
     * Fills the keyword set; if a lexeme matches one of these, {@link #getToken()}
     * emits a keyword token instead of an identifier token.
     */
    private void populateKeywords() {
        keywords.add("class");
        keywords.add("extends");
        keywords.add("var");
        keywords.add("static");
        keywords.add("dynamic");
        keywords.add("void");
        keywords.add("boolean");
        keywords.add("char");
        keywords.add("int");
        keywords.add("String");
        keywords.add("if");
        keywords.add("else");
        keywords.add("while");
        keywords.add("for");
        keywords.add("return");
        keywords.add("this");
        keywords.add("new");
        keywords.add("null");
        keywords.add("true");
        keywords.add("false");
    }

    // Comment processing

    /**
     * Processes a line comment ("//...").
     *
     * Consumes characters up to the end of the line so that tokenization can
     * continue. Every consumed character must still be a valid source character.
     *
     * @throws LexicalException if an unsupported character appears in the comment
     */
    private void proccessComment() throws LexicalException {
        currentChar = (char) reader.readChar();
        while (currentChar != '\n') {
            if (!isValidChar(currentChar)) {
                throw new LexicalException("Linea: " + lineNumber + " - Error lexico: Caracter no soportado (" + currentChar + ").");
            }
            currentChar = (char) reader.readChar();
        }
        checkNL(currentChar);
    }

    /**
     * Processes a block comment.
     *
     * Consumes the whole comment block so that tokenization can continue,
     * tracking newlines for the line counter. Reaching end of file before the
     * closing marker is an error and raises an exception.
     *
     * @throws LexicalException on an unsupported character or an unterminated block
     */
    private void processBlockComment() throws LexicalException {
        boolean closeBlockComment = false;
        char nextChar;
        // Two-character lookahead window to detect the closing marker.
        currentChar = (char) reader.readChar();
        nextChar = (char) reader.readChar();
        while (!closeBlockComment && nextChar != '\0') {
            if (!isValidChar(currentChar)) {
                throw new LexicalException("Linea: " + lineNumber + " - Error lexico: Caracter no soportado (" + currentChar + ").");
            }
            if (currentChar == '\n') {
                checkNL(currentChar);
            }
            if (currentChar == '*' && nextChar == '/') {
                closeBlockComment = true;
            }
            currentChar = nextChar;
            nextChar = (char) reader.readChar();
        }
        if (nextChar == '\0') {
            throw new LexicalException("Linea: " + lineNumber + " - Error lexico: El bloque de comentario no esta cerrado y se alcanzo el fin de archivo.");
        } else {
            checkNL(nextChar);
        }
    }

    // Validity checks

    /**
     * Well-formed-number check.
     *
     * To simplify error reporting in later stages, this prevents syntactically
     * impossible combinations (e.g. 123hola) at this stage.
     *
     * NOTE(review): this helper is currently not called anywhere in this class —
     * confirm whether it should be used from state 2 or removed.
     *
     * @return true if the character makes the number malformed,
     *         false otherwise
     */
    private boolean notExpectedCharNumber(char currentChar) {
        if (Character.isDigit(currentChar) || currentChar == ' '
                || currentChar == '\t' || currentChar == '\n'
                || currentChar == '+' || currentChar == '-'
                || currentChar == '/' || currentChar == '*'
                || currentChar == '%' || currentChar == ','
                || currentChar == ';' || currentChar == '>'
                || currentChar == '<' || currentChar == '='
                || currentChar == '!' || currentChar == ')') {
            return false;
        } else {
            return true;
        }
    }

    /**
     * Checks that the given character belongs to the basic ASCII letter set.
     *
     * @param currentChar
     * @return true if the character is a basic ASCII letter,
     *         false otherwise
     */
    private boolean isASCIILetter(char currentChar) {
        return (currentChar >= 'a' && currentChar <= 'z')
                || (currentChar >= 'A' && currentChar <= 'Z');
    }

    /**
     * Checks that the given character belongs to the set of printable basic
     * ASCII characters (plus newline and tab).
     *
     * @param currentChar
     * @return true if the character is acceptable in source text,
     *         false otherwise
     */
    private boolean isValidChar(char currentChar) {
        return currentChar == '\n' || currentChar == '\t'
                || currentChar >= 32 && currentChar < 127;
    }

    /**
     * Handles the line counter when a lookahead character may be a newline.
     *
     * If the character is not '\n', no line break occurred and the reader mark
     * is reset so the character is processed again as part of the next token.
     * If it is '\n', a line break occurred and there is no point in rewinding:
     * the next line (already advanced by the reader) must be processed.
     *
     * @param currentChar
     */
    private void checkNL(char currentChar) {
        if (currentChar == '\n') {
            lineNumber++;
        } else {
            reader.resetMark();
        }
    }
}
apache-2.0
greenlaw110/Rythm
src/main/java/org/rythmengine/extension/ICacheService.java
2732
/** * Copyright (C) 2013-2016 The Rythm Engine project * for LICENSE and other details see: * https://github.com/rythmengine/rythmengine */ package org.rythmengine.extension; /*- * #%L * Rythm Template Engine * %% * Copyright (C) 2017 - 2021 OSGL (Open Source General Library) * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.Serializable; /** * Define cache service */ public interface ICacheService { /** * Store an item into the cache service by key and set ttl value * * @param key * @param value * @param ttl time to live of the cached item. Time unit is second. * If set to negative number, then it will never expire. 
* If set to zero then the default ttl value will be used */ void put(String key, Serializable value, int ttl); /** * Store an item into the cache by key and use default ttl * * @param key * @param value */ void put(String key, Serializable value); /** * Remove an item from cache service by key * * @param key * @return the value associated with the key * @deprecated use {@link #evict(String)} instead */ Serializable remove(String key); /** * Remove an item from the cache service by key * @param key */ void evict(String key); /** * Return an item from the cache service by key * * @param key * @return the value associated with the key */ Serializable get(String key); /** * Check if the cache contains key * * @param key * @return true if key is found in the cache * @deprecated */ boolean contains(String key); /** * Remove all cached items */ void clear(); /** * Set default ttl value which will be used if user pass 0 as ttl or not specified ttl * * @param ttl */ void setDefaultTTL(int ttl); /** * Shutdown the cache service */ void shutdown(); /** * Restart the cache service after shutdown * * <p>Note, by default the cache service * should be started after initialized</p> */ void startup(); }
apache-2.0
apache/geronimo-yoko
yoko-core/src/test/java/test/rmi/ClientMain.java
17558
/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package test.rmi;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.Serializable;
import java.rmi.Remote;
import java.rmi.RemoteException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Vector;
import java.util.concurrent.TimeUnit;

import javax.rmi.PortableRemoteObject;

import org.junit.Assert;
import org.omg.PortableServer.POA;

/**
 * RMI-IIOP interoperability test client. Reads a server IOR from the file
 * "Sample.ref", narrows it to the {@code Sample} remote interface, and runs a
 * battery of round-trip marshaling tests (primitives, arrays, serializables,
 * remotes, CORBA references, enums, classes, maps) via the nested
 * {@link Test} helper. Run as a standalone {@code main} program, not JUnit.
 */
public class ClientMain extends Assert {

    /** Test battery; each method round-trips values through the remote {@code sample}. */
    public static class Test extends Assert {

        // Remote stub all tests exercise; set once in the constructor.
        private Sample sample;

        public Test(Sample sample) {
            this.sample = sample;
        }

        // Test invoking methods with primitive arguments
        public void testPrimitive() throws RemoteException {
            sample.setBoolean(true);
            assertTrue(sample.getBoolean());
            sample.setByte((byte)64);
            assertEquals((byte)64, sample.getByte());
            sample.setShort((short)128);
            assertEquals((short)128, sample.getShort());
            sample.setInt(256);
            assertEquals(256, sample.getInt());
            sample.setLong(512);
            assertEquals(512, sample.getLong());
            sample.setChar('a');
            assertEquals('a', sample.getChar());
        }

        // Test invoking methods with signature conflicts and arrays.
        // The server's sendReceiveX methods echo scalars unchanged and return
        // two-element arrays reversed (element order swapped), which is what
        // the assertions below check.
        public void testArray() throws RemoteException {
            assertTrue(10 == sample.sendReceiveInt(10));
            int[] intA = new int[] {10, 20};
            intA = sample.sendReceiveInt(intA);
            assertEquals(2, intA.length);
            assertTrue(20 == intA[0]);
            assertTrue(10 == intA[1]);

            assertTrue(10 == sample.sendReceiveShort((short)10));
            short[] shortA = new short[] {10, 20};
            shortA = sample.sendReceiveShort(shortA);
            assertEquals(2, shortA.length);
            assertTrue(20 == shortA[0]);
            assertTrue(10 == shortA[1]);

            assertTrue(10 == sample.sendReceiveChar((char)10));
            char[] charA = new char[] {10, 20};
            charA = sample.sendReceiveChar(charA);
            assertEquals(2, charA.length);
            assertTrue(20 == charA[0]);
            assertTrue(10 == charA[1]);

            assertTrue(10 == sample.sendReceiveByte((byte)10));
            byte[] byteA = new byte[] {10, 20};
            byteA = sample.sendReceiveByte(byteA);
            assertEquals(2, byteA.length);
            assertTrue(20 == byteA[0]);
            assertTrue(10 == byteA[1]);

            assertTrue(10L == sample.sendReceiveLong(10L));
            long[] longA = new long[] {10L, 20L};
            longA = sample.sendReceiveLong(longA);
            assertEquals(2, longA.length);
            assertTrue(20L == longA[0]);
            assertTrue(10L == longA[1]);

            assertTrue(10. == sample.sendReceiveFloat((float)10.));
            float[] floatA = new float[] {(float)10., (float)20.};
            floatA = sample.sendReceiveFloat(floatA);
            assertEquals(2, floatA.length);
            assertTrue(20. == floatA[0]);
            assertTrue(10. == floatA[1]);

            assertTrue(10. == sample.sendReceiveDouble(10.));
            double[] doubleA = new double[] {10., 20.};
            doubleA = sample.sendReceiveDouble(doubleA);
            assertEquals(2, doubleA.length);
            assertTrue(20. == doubleA[0]);
            assertTrue(10. == doubleA[1]);

            assertTrue(false == sample.sendReceiveBoolean(false));
            boolean[] booleanA = new boolean[] {true, false};
            booleanA = sample.sendReceiveBoolean(booleanA);
            assertEquals(2, booleanA.length);
            assertTrue(false == booleanA[0]);
            assertTrue(true == booleanA[1]);

            assertTrue("a".equals(sample.sendReceiveString("a")));
            String[] StringA = new String[] {"a", "b"};
            StringA = sample.sendReceiveString(StringA);
            assertEquals(2, StringA.length);
            assertTrue("b".equals(StringA[0]));
            assertTrue("a".equals(StringA[1]));

            SampleSerializable ser = new SampleSerializable();
            ser.setInt(10);
            SampleSerializable ser2 = (SampleSerializable)sample.sendReceiveSerializable(ser);
            assertEquals(10, ser2.getInt());
            Serializable[] sA = new Serializable[] { ser };
            sA = sample.sendReceiveSerializable(sA);
            ser2 = (SampleSerializable)sA[0];
            assertEquals(10, ser2.getInt());

            // Remote references must come back pointing at the same object.
            Remote r = sample.sendReceiveRemote(sample);
            Sample sample2 = (Sample) PortableRemoteObject.narrow(r, Sample.class);
            assertEquals(sample, sample2);
            Remote[] rA = new Remote[] { sample };
            rA = sample.sendReceiveRemote(rA);
            sample2 = (Sample) PortableRemoteObject.narrow(rA[0], Sample.class);
            assertEquals(sample, sample2);
        }

        // Invoke method with String argument
        public void testString() throws RemoteException {
            sample.setString("hello");
            assertEquals("hello", sample.getString());
        }

        // Make sure that a field definition for a value-type interface
        // gets marshaled correctly.  The SampleSerializable object defines a
        // List field into which we'll place a Vector object.  This should properly
        // be processed as a value type rather than an abstract interface.
        public void testVector() throws RemoteException {
            Vector v = new Vector(10);
            v.add("This is a test");
            SampleSerializable ser = new SampleSerializable();
            ser.setList(v);
            SampleSerializable ser2 = (SampleSerializable)sample.sendReceiveSerializable(ser);
            Vector v2 = (Vector)ser2.getList();
            // Capacity, size and contents must all survive the round trip.
            assertEquals(10, v2.capacity());
            assertEquals(1, v2.size());
            assertEquals("This is a test", v2.elementAt(0));
        }

        // Round-trip a primitive int array through set/get attribute methods.
        public void testIntArray() throws RemoteException {
            int[] intArray = new int[] {1, 2, 3};
            sample.setIntArray(intArray);
            int[] intArray2 = sample.getIntArray();
            for(int i = 0; i < intArray.length; i++) {
                assertEquals(intArray[i], intArray2[i]);
            }
        }

        // Smoke test: a plain serializable survives set/get without error.
        // NOTE(review): the returned value is only checked via the cast;
        // no field assertions are made here.
        public void testBasicSerializable() throws RemoteException {
            SampleSerializable ser = new SampleSerializable();
            sample.setSerializable(ser);
            SampleSerializable ser2 = (SampleSerializable) sample.getSerializable();
        }

        // Round-trip a custom-marshaled (CMSF v2) value type; the copy must be
        // a distinct object that still compares equal.
        public void testCmsfv2Data() throws RemoteException {
            SampleCmsfv2ChildData d = new SampleCmsfv2ChildData();
            for (int i = 0; i < 10; i++) {
                System.out.println("Discarding " + d);
                d = new SampleCmsfv2ChildData();
            }
            sample.setSerializable(d);
            Serializable s = sample.getSerializable();
            assertNotSame(d, s);
            assertEquals(d, s);
        }

        // Enum singletons must be preserved (same instance after round trip).
        public void testEnum() throws RemoteException {
            SampleEnum se = SampleEnum.SAMPLE2;
            sample.setSerializable(se);
            Serializable s = sample.getSerializable();
            assertSame(se, s);
        }

        // Arrays of enums round-trip deep-equal.
        public void testEnumArray() throws RemoteException {
            SampleEnum[] sa = { SampleEnum.SAMPLE3, SampleEnum.SAMPLE1, SampleEnum.SAMPLE3 };
            sample.setSerializable(sa);
            Object[] oa = (Object[])sample.getSerializable();
            assertTrue(Arrays.deepEquals(sa, oa));
        }

        // A default-constructed data value must round-trip equal.
        public void testData() throws RemoteException {
            SampleData sd = new SampleData();
            sample.setSerializable(sd);
            Serializable s = sample.getSerializable();
            assertEquals(sd, s);
        }

        // JDK enum (TimeUnit) must keep singleton identity across the wire.
        public void testTimeUnit() throws RemoteException {
            TimeUnit tu = TimeUnit.NANOSECONDS;
            sample.setSerializable(tu);
            Serializable s = sample.getSerializable();
            assertSame(tu, s);
        }

        // Arrays of JDK enums round-trip deep-equal.
        public void testTimeUnitArray() throws RemoteException {
            TimeUnit[] tua = { TimeUnit.NANOSECONDS, TimeUnit.HOURS, TimeUnit.NANOSECONDS };
            sample.setSerializable(tua);
            Object[] oa = (Object[])sample.getSerializable();
            assertTrue(Arrays.deepEquals(tua, oa));
        }

        // A Remote field referencing the server-side object itself.
        public void testRemoteAttributeOnServer() throws RemoteException {
            SampleSerializable ser = new SampleSerializable();
            ser.setRemote(sample);
            sample.setSerializable(ser);
            SampleSerializable ser2 = (SampleSerializable) sample.getSerializable();
            Sample sample2 = (Sample) PortableRemoteObject.narrow(ser2.getRemote(), Sample.class);
            assertEquals(sample, sample2);
        }

        // A Remote field referencing a client-side object; the returned stub
        // must still talk to the live client-side implementation.
        public void testRemoteAttributeOnClient() throws RemoteException {
            SampleSerializable ser = new SampleSerializable();
            SampleRemote sampleRemote = new SampleRemoteImpl();
            ser.setRemote(sampleRemote);
            sample.setSerializable(ser);
            SampleSerializable ser2 = (SampleSerializable) sample.getSerializable();
            SampleRemote sampleRemote2 = (SampleRemote)
                PortableRemoteObject.narrow(ser2.getRemote(), SampleRemote.class);
            sampleRemote.setInt(42);
            assertEquals(42, sampleRemote2.getInt());
        }

        // Same as above but via the typed SampleRemote field (no narrow needed).
        public void testComplexRemoteAttributeOnClient() throws RemoteException {
            SampleSerializable ser = new SampleSerializable();
            SampleRemoteImpl sampleRemote = new SampleRemoteImpl();
            ser.setSampleRemote(sampleRemote);
            sample.setSerializable(ser);
            SampleSerializable ser2 = (SampleSerializable) sample.getSerializable();
            SampleRemote sampleRemote2 = ser2.getSampleRemote();
            sampleRemote.setInt(42);
            assertEquals(42, sampleRemote2.getInt());
        }

        // Passing a concrete remote implementation directly as an argument.
        public void testComplexRemoteArgument() throws RemoteException {
            SampleRemoteImpl sampleRemote = new SampleRemoteImpl();
            sample.setSampleRemote(sampleRemote);
            sample.getSampleRemote();
        }

        // A serializable nested inside another serializable keeps its state.
        public void testSerializableAttribute() throws RemoteException {
            SampleSerializable ser = new SampleSerializable();
            SampleSerializable attr = new SampleSerializable();
            ser.setSerializable(attr);
            attr.setInt(42);
            sample.setSerializable(ser);
            SampleSerializable serCopy = (SampleSerializable) sample.getSerializable();
            SampleSerializable attrCopy = (SampleSerializable) serCopy.getSerializable();
            assertEquals(attr.getInt(), attrCopy.getInt());
        }

        // A self-referencing object graph must keep its identity after unmarshaling.
        public void testSerializableSelfReference() throws RemoteException {
            SampleSerializable ser = new SampleSerializable();
            ser.setSerializableObject(ser);
            sample.setSerializable(ser);
            SampleSerializable ser2 = (SampleSerializable) sample.getSerializable();
            assertTrue(ser2 == ser2.getSerializableObject());
        }

        // A java.rmi.Remote-typed field holding a client-side implementation.
        public void testRemoteObjectAttribute() throws RemoteException {
            SampleSerializable ser = new SampleSerializable();
            SampleRemoteImpl sampleRemote = new SampleRemoteImpl();
            ser.setRemoteObject(sampleRemote);
            sample.setSerializable(ser);
            SampleSerializable ser2 = (SampleSerializable) sample.getSerializable();
            SampleRemote sampleRemote2 = (SampleRemote)
                PortableRemoteObject.narrow(ser2.getRemoteObject(), SampleRemote.class);
            sampleRemote.setInt(42);
            assertEquals(42, sampleRemote2.getInt());
        }

        // A CORBA object reference field, narrowed via the generated Helper.
        public void testCorbaAttributeWithHelper(SampleCorba corbaRef) throws RemoteException {
            SampleSerializable ser = new SampleSerializable();
            ser.setCorbaObj(corbaRef);
            sample.setSerializable(ser);
            SampleSerializable ser2 = (SampleSerializable) sample.getSerializable();
            SampleCorba corbaRef2 = SampleCorbaHelper.narrow(ser2.getCorbaObj());
            corbaRef.i(42);
            assertEquals(42, corbaRef2.i());
            corbaRef.s("Don't panic!");
            assertEquals("Don't panic!", corbaRef2.s());
        }

        // Same CORBA reference test but narrowed via PortableRemoteObject.
        public void testCorbaAttributeWithPRO(SampleCorba corbaRef) throws RemoteException {
            SampleSerializable ser = new SampleSerializable();
            ser.setCorbaObj(corbaRef);
            sample.setSerializable(ser);
            SampleSerializable ser2 = (SampleSerializable) sample.getSerializable();
            SampleCorba corbaRef2 = (SampleCorba)
                PortableRemoteObject.narrow(ser2.getCorbaObj(), SampleCorba.class);
            corbaRef.i(42);
            assertEquals(42, corbaRef2.i());
            corbaRef.s("Don't panic!");
            assertEquals("Don't panic!", corbaRef2.s());
        }

        // A typed SampleCorba field; just verify it unmarshals without error.
        public void testComplexCorbaAttribute(SampleCorba corbaRef) throws RemoteException {
            SampleSerializable ser = new SampleSerializable();
            ser.setSampleCorba(corbaRef);
            sample.setSerializable(ser);
            SampleSerializable ser2 = (SampleSerializable) sample.getSerializable();
            SampleCorba corbaRef2 = ser2.getSampleCorba();
        }

        // Map round trip: equal contents, and shared references inside the map
        // must remain shared (same instance) after unmarshaling.
        public void testHashMap() throws RemoteException {
            HashMap<Integer, Serializable> map = new HashMap<>();
            String str = new String("hello");
            map.put(0, str);
            map.put(1, str);
            Integer two = new Integer(2);
            map.put(3, two);
            map.put(4, two);
            sample.setSerializable(map);
            Map<?,?> map2 = (Map<?,?>) sample.getSerializable();
            assertEquals(map, map2);
            assertSame(map2.get(3), map2.get(4));
            assertSame(map2.get(0), map2.get(1));
        }

        // Class objects must resolve back to the identical Class instance.
        public void testClass() throws RemoteException {
            final Class<?> type = Object.class;
            sample.setSerializable(type);
            Serializable s = sample.getSerializable();
            assertSame(s, type);
        }

        // Arrays of Class objects round-trip element-for-element.
        public void testClassArray() throws RemoteException {
            final Class<?>[] types = { Object.class, Map.class, String.class, Map.class };
            sample.setSerializable(types);
            Object[] oa = (Object[])sample.getSerializable();
            assertArrayEquals(types, oa);
        }
    }

    /**
     * Entry point: boots the ORB, publishes a local CORBA object for the CORBA
     * reference tests, resolves the server's Sample IOR from "Sample.ref",
     * and runs every test in the {@link Test} battery.
     *
     * NOTE(review): the BufferedReader for "Sample.ref" is never closed.
     */
    public static void main(String[] args) throws Exception {
        // Initialize ORB
        final org.omg.CORBA.ORB orb = org.omg.CORBA.ORB.init(new String[0], null);
        POA rootPoa = (POA) orb.resolve_initial_references("RootPOA");
        rootPoa.the_POAManager().activate();
        System.out.println("ORB: " + orb.getClass().getName());

        // Bind a sample CORBA object
        SampleCorba_impl sampleCorba = new SampleCorba_impl();
        byte [] id = rootPoa.activate_object(sampleCorba);
        org.omg.CORBA.Object sampleCorbaRef =
            rootPoa.create_reference_with_id(id, sampleCorba._all_interfaces(rootPoa, id)[0]);

        // Get IOR to Sample on server
        BufferedReader reader = new BufferedReader(new FileReader("Sample.ref"));
        String ref = reader.readLine();
        org.omg.CORBA.Object sampleRef = orb.string_to_object(ref);
        Sample sample = (Sample) PortableRemoteObject.narrow(sampleRef, Sample.class);

        // Run RMI tests
        Test test = new Test(sample);
        test.testVector();
        test.testPrimitive();
        test.testArray();
        test.testString();
        test.testIntArray();
        test.testBasicSerializable();
        test.testRemoteObjectAttribute();
        test.testRemoteAttributeOnServer();
        test.testRemoteAttributeOnClient();
        test.testComplexRemoteAttributeOnClient();
        test.testComplexRemoteArgument();
        test.testSerializableAttribute();
        test.testSerializableSelfReference();
        test.testCorbaAttributeWithHelper(SampleCorbaHelper.narrow(sampleCorbaRef));
        test.testCorbaAttributeWithPRO((SampleCorba) PortableRemoteObject.narrow(sampleCorbaRef, SampleCorba.class));
        test.testComplexCorbaAttribute(SampleCorbaHelper.narrow(sampleCorbaRef));
        test.testHashMap();
        test.testEnum();
        test.testEnumArray();
        test.testData();
        test.testTimeUnit();
        test.testTimeUnitArray();
        test.testCmsfv2Data();
        test.testClass();
        test.testClassArray();
        //myORB.destroy();
        System.out.println("Testing complete");
    }
}
apache-2.0
sensui/guava-libraries
guava-tests/test/com/google/common/util/concurrent/MoreExecutorsTest.java
21911
/* * Copyright (C) 2008 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * Portions of this file are modified versions of * http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/src/test/tck/AbstractExecutorServiceTest.java?revision=1.30 * which contained the following notice: * * Written by Doug Lea with assistance from members of JCP JSR-166 * Expert Group and released to the public domain, as explained at * http://creativecommons.org/publicdomain/zero/1.0/ * Other contributors include Andrew Wright, Jeffrey Hayes, * Pat Fisher, Mike Judd. 
*/ package com.google.common.util.concurrent; import static com.google.common.collect.Iterables.getOnlyElement; import static com.google.common.util.concurrent.MoreExecutors.invokeAnyImpl; import static com.google.common.util.concurrent.MoreExecutors.listeningDecorator; import static com.google.common.util.concurrent.MoreExecutors.renamingDecorator; import static com.google.common.util.concurrent.MoreExecutors.sameThreadExecutor; import static java.util.concurrent.TimeUnit.SECONDS; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import static org.truth0.Truth.ASSERT; import com.google.common.base.Suppliers; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.common.testing.ClassSanityTester; import com.google.common.util.concurrent.MoreExecutors.Application; import org.mockito.InOrder; import org.mockito.Mockito; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.Callable; import java.util.concurrent.CountDownLatch; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; /** * Tests for MoreExecutors. 
* * @author Kyle Littlefield (klittle) */ public class MoreExecutorsTest extends JSR166TestCase { private static final Runnable EMPTY_RUNNABLE = new Runnable() { @Override public void run() {} }; public void testSameThreadExecutorServiceInThreadExecution() throws Exception { final ListeningExecutorService executor = MoreExecutors.sameThreadExecutor(); final ThreadLocal<Integer> threadLocalCount = new ThreadLocal<Integer>() { @Override protected Integer initialValue() { return 0; } }; final AtomicReference<Throwable> throwableFromOtherThread = new AtomicReference<Throwable>(null); final Runnable incrementTask = new Runnable() { @Override public void run() { threadLocalCount.set(threadLocalCount.get() + 1); } }; Thread otherThread = new Thread( new Runnable() { @Override public void run() { try { Future<?> future = executor.submit(incrementTask); assertTrue(future.isDone()); assertEquals(1, threadLocalCount.get().intValue()); } catch (Throwable Throwable) { throwableFromOtherThread.set(Throwable); } } }); otherThread.start(); ListenableFuture<?> future = executor.submit(incrementTask); assertTrue(future.isDone()); assertListenerRunImmediately(future); assertEquals(1, threadLocalCount.get().intValue()); otherThread.join(1000); assertEquals(Thread.State.TERMINATED, otherThread.getState()); Throwable throwable = throwableFromOtherThread.get(); assertNull("Throwable from other thread: " + (throwable == null ? 
null : Throwables.getStackTraceAsString(throwable)), throwableFromOtherThread.get()); } public void testSameThreadExecutorInvokeAll() throws Exception { final ExecutorService executor = MoreExecutors.sameThreadExecutor(); final ThreadLocal<Integer> threadLocalCount = new ThreadLocal<Integer>() { @Override protected Integer initialValue() { return 0; } }; final Callable<Integer> incrementTask = new Callable<Integer>() { @Override public Integer call() { int i = threadLocalCount.get(); threadLocalCount.set(i + 1); return i; } }; List<Future<Integer>> futures = executor.invokeAll(Collections.nCopies(10, incrementTask)); for (int i = 0; i < 10; i++) { Future<Integer> future = futures.get(i); assertTrue("Task should have been run before being returned", future.isDone()); assertEquals(i, future.get().intValue()); } assertEquals(10, threadLocalCount.get().intValue()); } public void testSameThreadExecutorServiceTermination() throws Exception { final ExecutorService executor = MoreExecutors.sameThreadExecutor(); final CyclicBarrier barrier = new CyclicBarrier(2); final AtomicReference<Throwable> throwableFromOtherThread = new AtomicReference<Throwable>(null); final Runnable doNothingRunnable = new Runnable() { @Override public void run() { }}; Thread otherThread = new Thread(new Runnable() { @Override public void run() { try { Future<?> future = executor.submit(new Callable<Void>() { @Override public Void call() throws Exception { // WAIT #1 barrier.await(1, TimeUnit.SECONDS); // WAIT #2 barrier.await(1, TimeUnit.SECONDS); assertTrue(executor.isShutdown()); assertFalse(executor.isTerminated()); // WAIT #3 barrier.await(1, TimeUnit.SECONDS); return null; } }); assertTrue(future.isDone()); assertTrue(executor.isShutdown()); assertTrue(executor.isTerminated()); } catch (Throwable Throwable) { throwableFromOtherThread.set(Throwable); } }}); otherThread.start(); // WAIT #1 barrier.await(1, TimeUnit.SECONDS); assertFalse(executor.isShutdown()); 
assertFalse(executor.isTerminated()); executor.shutdown(); assertTrue(executor.isShutdown()); try { executor.submit(doNothingRunnable); fail("Should have encountered RejectedExecutionException"); } catch (RejectedExecutionException ex) { // good to go } assertFalse(executor.isTerminated()); // WAIT #2 barrier.await(1, TimeUnit.SECONDS); assertFalse(executor.awaitTermination(20, TimeUnit.MILLISECONDS)); // WAIT #3 barrier.await(1, TimeUnit.SECONDS); assertTrue(executor.awaitTermination(1, TimeUnit.SECONDS)); assertTrue(executor.awaitTermination(0, TimeUnit.SECONDS)); assertTrue(executor.isShutdown()); try { executor.submit(doNothingRunnable); fail("Should have encountered RejectedExecutionException"); } catch (RejectedExecutionException ex) { // good to go } assertTrue(executor.isTerminated()); otherThread.join(1000); assertEquals(Thread.State.TERMINATED, otherThread.getState()); Throwable throwable = throwableFromOtherThread.get(); assertNull("Throwable from other thread: " + (throwable == null ? 
null : Throwables.getStackTraceAsString(throwable)), throwableFromOtherThread.get()); } public void testSameThreadExecutor_shutdownNow() { ExecutorService executor = MoreExecutors.sameThreadExecutor(); assertEquals(ImmutableList.of(), executor.shutdownNow()); assertTrue(executor.isShutdown()); } public void testExecuteAfterShutdown() { ExecutorService executor = MoreExecutors.sameThreadExecutor(); executor.shutdown(); try { executor.execute(EMPTY_RUNNABLE); fail(); } catch (RejectedExecutionException expected) {} } public <T> void testListeningExecutorServiceInvokeAllJavadocCodeCompiles() throws Exception { ListeningExecutorService executor = MoreExecutors.sameThreadExecutor(); List<Callable<T>> tasks = ImmutableList.of(); @SuppressWarnings("unchecked") // guaranteed by invokeAll contract List<ListenableFuture<T>> futures = (List) executor.invokeAll(tasks); } public void testListeningDecorator() throws Exception { ListeningExecutorService service = listeningDecorator(MoreExecutors.sameThreadExecutor()); assertSame(service, listeningDecorator(service)); List<Callable<String>> callables = ImmutableList.of(Callables.returning("x")); List<Future<String>> results; results = service.invokeAll(callables); ASSERT.that(getOnlyElement(results)).isA(ListenableFutureTask.class); results = service.invokeAll(callables, 1, SECONDS); ASSERT.that(getOnlyElement(results)).isA(ListenableFutureTask.class); /* * TODO(cpovirk): move ForwardingTestCase somewhere common, and use it to * test the forwarded methods */ } public void testListeningDecorator_scheduleSuccess() throws Exception { final CountDownLatch completed = new CountDownLatch(1); ScheduledThreadPoolExecutor delegate = new ScheduledThreadPoolExecutor(1) { @Override protected void afterExecute(Runnable r, Throwable t) { completed.countDown(); } }; ListeningScheduledExecutorService service = listeningDecorator(delegate); ListenableFuture<?> future = service.schedule(Callables.returning(null), 1, TimeUnit.MILLISECONDS); /* * 
Wait not just until the Future's value is set (as in future.get()) but * also until ListeningScheduledExecutorService's wrapper task is done * executing listeners, as detected by yielding control to afterExecute. */ completed.await(); assertTrue(future.isDone()); assertListenerRunImmediately(future); assertEquals(0, delegate.getQueue().size()); } public void testListeningDecorator_scheduleFailure() throws Exception { ScheduledThreadPoolExecutor delegate = new ScheduledThreadPoolExecutor(1); ListeningScheduledExecutorService service = listeningDecorator(delegate); RuntimeException ex = new RuntimeException(); ListenableFuture<?> future = service.schedule(new ThrowingRunnable(0, ex), 1, TimeUnit.MILLISECONDS); assertExecutionException(future, ex); assertEquals(0, delegate.getQueue().size()); } public void testListeningDecorator_schedulePeriodic() throws Exception { ScheduledThreadPoolExecutor delegate = new ScheduledThreadPoolExecutor(1); ListeningScheduledExecutorService service = listeningDecorator(delegate); RuntimeException ex = new RuntimeException(); ListenableFuture<?> future; ThrowingRunnable runnable = new ThrowingRunnable(5, ex); future = service.scheduleAtFixedRate(runnable, 1, 1, TimeUnit.MILLISECONDS); assertExecutionException(future, ex); assertEquals(5, runnable.count); assertEquals(0, delegate.getQueue().size()); runnable = new ThrowingRunnable(5, ex); future = service.scheduleWithFixedDelay(runnable, 1, 1, TimeUnit.MILLISECONDS); assertExecutionException(future, ex); assertEquals(5, runnable.count); assertEquals(0, delegate.getQueue().size()); } public void testListeningDecorator_cancelled() throws Exception { ScheduledThreadPoolExecutor delegate = new ScheduledThreadPoolExecutor(1); BlockingQueue<?> delegateQueue = delegate.getQueue(); ListeningScheduledExecutorService service = listeningDecorator(delegate); ListenableFuture<?> future; ScheduledFuture<?> delegateFuture; Runnable runnable = new Runnable() { @Override public void run() {} }; future = 
service.schedule(runnable, 5, TimeUnit.MINUTES); future.cancel(true); assertTrue(future.isCancelled()); delegateFuture = (ScheduledFuture<?>) delegateQueue.element(); assertTrue(delegateFuture.isCancelled()); delegateQueue.clear(); future = service.scheduleAtFixedRate(runnable, 5, 5, TimeUnit.MINUTES); future.cancel(true); assertTrue(future.isCancelled()); delegateFuture = (ScheduledFuture<?>) delegateQueue.element(); assertTrue(delegateFuture.isCancelled()); delegateQueue.clear(); future = service.scheduleWithFixedDelay(runnable, 5, 5, TimeUnit.MINUTES); future.cancel(true); assertTrue(future.isCancelled()); delegateFuture = (ScheduledFuture<?>) delegateQueue.element(); assertTrue(delegateFuture.isCancelled()); } private static final class ThrowingRunnable implements Runnable { final int throwAfterCount; final RuntimeException thrown; int count; ThrowingRunnable(int throwAfterCount, RuntimeException thrown) { this.throwAfterCount = throwAfterCount; this.thrown = thrown; } @Override public void run() { if (++count >= throwAfterCount) { throw thrown; } } } private static void assertExecutionException(Future<?> future, Exception expectedCause) throws Exception { try { future.get(); fail("Expected ExecutionException"); } catch (ExecutionException e) { assertSame(expectedCause, e.getCause()); } } /** * invokeAny(null) throws NPE */ public void testInvokeAnyImpl_nullTasks() throws Exception { ListeningExecutorService e = sameThreadExecutor(); try { invokeAnyImpl(e, null, false, 0); shouldThrow(); } catch (NullPointerException success) { } finally { joinPool(e); } } /** * invokeAny(empty collection) throws IAE */ public void testInvokeAnyImpl_emptyTasks() throws Exception { ListeningExecutorService e = sameThreadExecutor(); try { invokeAnyImpl(e, new ArrayList<Callable<String>>(), false, 0); shouldThrow(); } catch (IllegalArgumentException success) { } finally { joinPool(e); } } /** * invokeAny(c) throws NPE if c has null elements */ public void 
testInvokeAnyImpl_nullElement() throws Exception { ListeningExecutorService e = sameThreadExecutor(); List<Callable<Integer>> l = new ArrayList<Callable<Integer>>(); l.add(new Callable<Integer>() { @Override public Integer call() { throw new ArithmeticException("/ by zero"); } }); l.add(null); try { invokeAnyImpl(e, l, false, 0); shouldThrow(); } catch (NullPointerException success) { } finally { joinPool(e); } } /** * invokeAny(c) throws ExecutionException if no task in c completes */ public void testInvokeAnyImpl_noTaskCompletes() throws Exception { ListeningExecutorService e = sameThreadExecutor(); List<Callable<String>> l = new ArrayList<Callable<String>>(); l.add(new NPETask()); try { invokeAnyImpl(e, l, false, 0); shouldThrow(); } catch (ExecutionException success) { assertTrue(success.getCause() instanceof NullPointerException); } finally { joinPool(e); } } /** * invokeAny(c) returns result of some task in c if at least one completes */ public void testInvokeAnyImpl() throws Exception { ListeningExecutorService e = sameThreadExecutor(); try { List<Callable<String>> l = new ArrayList<Callable<String>>(); l.add(new StringTask()); l.add(new StringTask()); String result = invokeAnyImpl(e, l, false, 0); assertSame(TEST_STRING, result); } finally { joinPool(e); } } private static void assertListenerRunImmediately(ListenableFuture<?> future) { CountingRunnable listener = new CountingRunnable(); future.addListener(listener, sameThreadExecutor()); assertEquals(1, listener.count); } private static final class CountingRunnable implements Runnable { int count; @Override public void run() { count++; } } public void testAddDelayedShutdownHook_success() throws InterruptedException { TestApplication application = new TestApplication(); ExecutorService service = mock(ExecutorService.class); application.addDelayedShutdownHook(service, 2, TimeUnit.SECONDS); verify(service, Mockito.never()).shutdown(); application.shutdown(); InOrder shutdownFirst = Mockito.inOrder(service); 
shutdownFirst.verify(service).shutdown(); shutdownFirst.verify(service).awaitTermination(2, TimeUnit.SECONDS); } public void testAddDelayedShutdownHook_interrupted() throws InterruptedException { TestApplication application = new TestApplication(); ExecutorService service = mock(ExecutorService.class); application.addDelayedShutdownHook(service, 2, TimeUnit.SECONDS); when(service.awaitTermination(2, TimeUnit.SECONDS)).thenThrow(new InterruptedException()); application.shutdown(); verify(service).shutdown(); } public void testGetExitingExcutorService_executorSetToUseDaemonThreads() { TestApplication application = new TestApplication(); ThreadPoolExecutor executor = new ThreadPoolExecutor( 1, 2, 3, TimeUnit.SECONDS, new ArrayBlockingQueue<Runnable>(1)); assertNotNull(application.getExitingExecutorService(executor)); assertTrue(executor.getThreadFactory().newThread(EMPTY_RUNNABLE).isDaemon()); } public void testGetExitingExcutorService_executorDelegatesToOriginal() { TestApplication application = new TestApplication(); ThreadPoolExecutor executor = mock(ThreadPoolExecutor.class); ThreadFactory threadFactory = mock(ThreadFactory.class); when(executor.getThreadFactory()).thenReturn(threadFactory); application.getExitingExecutorService(executor).execute(EMPTY_RUNNABLE); verify(executor).execute(EMPTY_RUNNABLE); } public void testGetExitingExcutorService_shutdownHookRegistered() throws InterruptedException { TestApplication application = new TestApplication(); ThreadPoolExecutor executor = mock(ThreadPoolExecutor.class); ThreadFactory threadFactory = mock(ThreadFactory.class); when(executor.getThreadFactory()).thenReturn(threadFactory); application.getExitingExecutorService(executor); application.shutdown(); verify(executor).shutdown(); } public void testGetExitingScheduledExcutorService_executorSetToUseDaemonThreads() { TestApplication application = new TestApplication(); ScheduledThreadPoolExecutor executor = new ScheduledThreadPoolExecutor(1); 
assertNotNull(application.getExitingScheduledExecutorService(executor)); assertTrue(executor.getThreadFactory().newThread(EMPTY_RUNNABLE).isDaemon()); } public void testGetExitingScheduledExcutorService_executorDelegatesToOriginal() { TestApplication application = new TestApplication(); ScheduledThreadPoolExecutor executor = mock(ScheduledThreadPoolExecutor.class); ThreadFactory threadFactory = mock(ThreadFactory.class); when(executor.getThreadFactory()).thenReturn(threadFactory); application.getExitingScheduledExecutorService(executor).execute(EMPTY_RUNNABLE); verify(executor).execute(EMPTY_RUNNABLE); } public void testGetScheduledExitingExcutorService_shutdownHookRegistered() throws InterruptedException { TestApplication application = new TestApplication(); ScheduledThreadPoolExecutor executor = mock(ScheduledThreadPoolExecutor.class); ThreadFactory threadFactory = mock(ThreadFactory.class); when(executor.getThreadFactory()).thenReturn(threadFactory); application.getExitingScheduledExecutorService(executor); application.shutdown(); verify(executor).shutdown(); } public void testPlatformThreadFactory_default() { ThreadFactory factory = MoreExecutors.platformThreadFactory(); assertNotNull(factory); // Executors#defaultThreadFactory() may return a new instance each time. 
assertEquals(factory.getClass(), Executors.defaultThreadFactory().getClass()); } public void testThreadRenaming() { Executor renamingExecutor = renamingDecorator(sameThreadExecutor(), Suppliers.ofInstance("FooBar")); String oldName = Thread.currentThread().getName(); renamingExecutor.execute(new Runnable() { @Override public void run() { assertEquals("FooBar", Thread.currentThread().getName()); }}); assertEquals(oldName, Thread.currentThread().getName()); } public void testExecutors_nullCheck() throws Exception { new ClassSanityTester() .forAllPublicStaticMethods(MoreExecutors.class) .thatReturn(Executor.class) .testNulls(); } private static class TestApplication extends Application { private final List<Thread> hooks = Lists.newArrayList(); @Override synchronized void addShutdownHook(Thread hook) { hooks.add(hook); } synchronized void shutdown() throws InterruptedException { for (Thread hook : hooks) { hook.start(); } for (Thread hook : hooks) { hook.join(); } } } }
apache-2.0
tensorics/tensorics-core
src/java/org/tensorics/core/iterable/lang/ScalarIterableSupport.java
2224
// @formatter:off /******************************************************************************* * * This file is part of tensorics. * * Copyright (c) 2008-2011, CERN. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ // @formatter:on package org.tensorics.core.iterable.lang; import org.tensorics.core.math.ExtendedField; import org.tensorics.core.scalar.lang.ScalarSupport; /** * Provides utility methods for acting on collections of field elements. * * @author kfuchsbe * @param <V> the type of the values of the field */ public class ScalarIterableSupport<V> extends ScalarSupport<V> { private final IterableOperationRepository<V> repository; public ScalarIterableSupport(ExtendedField<V> field) { super(field); this.repository = new IterableOperationRepository<>(field); } public final V averageOf(Iterable<V> values) { return repository.average().apply(values); } public final V sizeOf(Iterable<V> values) { return repository.size().apply(values); } public final V sumOf(Iterable<V> values) { return repository.sum().apply(values); } public V rmsOf(Iterable<V> values) { return repository.rms().apply(values); } public V sumOfSquaresOf(Iterable<V> values) { return repository.sumOfSquares().apply(values); } public V varOf(Iterable<V> values) { return repository.var().apply(values); } public V stdOf(Iterable<V> values) { return repository.std().apply(values); } }
apache-2.0
shutkou/ios-driver
server/src/main/java/org/uiautomation/ios/server/command/uiautomation/GetSessionsNHandler.java
2148
/*
 * Copyright 2012-2013 eBay Software Foundation and ios-driver committers
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.uiautomation.ios.server.command.uiautomation;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.openqa.selenium.remote.Response;
import org.uiautomation.ios.IOSCapabilities;
import org.uiautomation.ios.communication.WebDriverLikeRequest;
import org.uiautomation.ios.server.IOSServerManager;
import org.uiautomation.ios.server.ServerSideSession;
import org.uiautomation.ios.server.application.IOSRunningApplication;
import org.uiautomation.ios.server.command.BaseNativeCommandHandler;

import java.util.List;

/**
 * Handles the "list all sessions" command: reports, for every active server-side session, its id
 * and the raw capabilities of the application that session is running.
 */
public class GetSessionsNHandler extends BaseNativeCommandHandler {

  public GetSessionsNHandler(IOSServerManager driver, WebDriverLikeRequest request) {
    super(driver, request);
  }

  /**
   * Builds a JSON array with one object per session, each containing the session "id" and its
   * "capabilities", and returns it wrapped in a successful (status 0) {@link Response}.
   */
  @Override
  public Response handle() throws Exception {
    JSONArray res = new JSONArray();
    List<ServerSideSession> sessions = getServer().getSessions();
    for (ServerSideSession s : sessions) {
      JSONObject session = new JSONObject();
      session.put("id", s.getSessionId());
      IOSRunningApplication app = s.getApplication();
      IOSCapabilities cap = app.getCapabilities();
      session.put("capabilities", cap.getRawCapabilities());
      res.put(session);
    }
    Response resp = new Response();
    // This command is not bound to a particular session, so a placeholder id is used.
    resp.setSessionId("dummy one");
    resp.setStatus(0);
    // NOTE(review): the array is serialized to a String rather than set as a JSON object —
    // presumably what the wire protocol expects; confirm against the response serializer.
    resp.setValue(res.toString());
    return resp;
  }

  @Override
  public JSONObject configurationDescription() throws JSONException {
    return noConfigDefined();
  }
}
apache-2.0
apache-2.0
drewnoakes/metadata-extractor
Tests/com/drew/lang/ByteTrieTest.java
2254
/*
 * Copyright 2002-2019 Drew Noakes and contributors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * More information about this project is available at:
 *
 *    https://drewnoakes.com/code/exif/
 *    https://github.com/drewnoakes/metadata-extractor
 */
package com.drew.lang;

import org.junit.Test;

import static org.junit.Assert.*;

/**
 * Unit tests for {@link ByteTrie}: lookup of stored byte paths, longest-prefix matching,
 * default values, and rejection of empty paths.
 *
 * @author Drew Noakes https://drewnoakes.com
 */
public class ByteTrieTest
{
    @Test
    public void testBasics()
    {
        final String[] entries = {"HELLO", "HELLO WORLD", "HERBERT"};

        ByteTrie<String> trie = new ByteTrie<String>();
        for (String entry : entries) {
            trie.addPath(entry, entry.getBytes());
        }

        // Every inserted string resolves back to the very same instance.
        for (String entry : entries) {
            assertSame(entry, trie.find(entry.getBytes()));
        }

        // Without a default value, unknown and partial keys yield null.
        assertNull(trie.find("Not Included".getBytes()));
        assertNull(trie.find("HELL".getBytes()));
        assertNull(trie.find("H".getBytes()));

        // The longest stored prefix of the input wins.
        assertEquals("HELLO", trie.find("HELLO MUM".getBytes()));
        assertEquals("HELLO WORLD".length(), trie.getMaxDepth());

        // Once a default is set, the former null cases fall back to it.
        trie.setDefaultValue("DEFAULT");
        assertEquals("DEFAULT", trie.find("Also Not Included".getBytes()));
        assertEquals("DEFAULT", trie.find("H".getBytes()));
        assertEquals("DEFAULT", trie.find("HELL".getBytes()));
    }

    @Test
    public void testDisallowsAddingEmptyPath()
    {
        try {
            new ByteTrie<String>().addPath("Foo", new byte[0]);
            fail();
        } catch (IllegalArgumentException ignored) {
        }

        try {
            new ByteTrie<String>().addPath("Foo", new byte[0], new byte[0], new byte[0]);
            fail();
        } catch (IllegalArgumentException ignored) {
        }
    }
}
apache-2.0
apurtell/hadoop
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/aliasmap/InMemoryAliasMap.java
14926
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.aliasmap; import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException; import org.apache.commons.compress.archivers.tar.TarArchiveEntry; import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream; import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.protocol.ProvidedStorageLocation; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ProvidedStorageLocationProto; import org.apache.hadoop.hdfs.protocolPB.PBHelperClient; import org.apache.hadoop.hdfs.server.common.FileRegion; import org.apache.hadoop.hdfs.server.namenode.ImageServlet; import 
org.apache.hadoop.hdfs.server.namenode.TransferFsImage; import org.apache.hadoop.hdfs.util.DataTransferThrottler; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.util.Lists; import org.fusesource.leveldbjni.JniDBFactory; import org.iq80.leveldb.DB; import org.iq80.leveldb.DBIterator; import org.iq80.leveldb.Options; import org.iq80.leveldb.ReadOptions; import org.iq80.leveldb.Snapshot; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.annotation.Nonnull; import javax.servlet.http.HttpServletResponse; import java.io.BufferedOutputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.net.URI; import java.nio.file.Files; import java.util.ArrayList; import java.util.Map; import java.util.Optional; /** * InMemoryAliasMap is an implementation of the InMemoryAliasMapProtocol for * use with LevelDB. */ @InterfaceAudience.Public @InterfaceStability.Unstable public class InMemoryAliasMap implements InMemoryAliasMapProtocol, Configurable { private static final Logger LOG = LoggerFactory .getLogger(InMemoryAliasMap.class); private static final String SNAPSHOT_COPY_DIR = "aliasmap_snapshot"; private static final String TAR_NAME = "aliasmap.tar.gz"; private final URI aliasMapURI; private final DB levelDb; private Configuration conf; private String blockPoolID; @Override public void setConf(Configuration conf) { this.conf = conf; } @Override public Configuration getConf() { return this.conf; } public static @Nonnull InMemoryAliasMap init(Configuration conf, String blockPoolID) throws IOException { Options options = new Options(); options.createIfMissing(true); String directory = conf.get(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_INMEMORY_LEVELDB_DIR); if (directory == null) { throw new IOException("InMemoryAliasMap location is null"); } File levelDBpath; if (blockPoolID != null) { levelDBpath = new File(directory, blockPoolID); } else { levelDBpath = new 
File(directory); } if (!levelDBpath.exists()) { LOG.warn("InMemoryAliasMap location {} is missing. Creating it.", levelDBpath); if(!levelDBpath.mkdirs()) { throw new IOException( "Unable to create missing aliasmap location: " + levelDBpath); } } DB levelDb = JniDBFactory.factory.open(levelDBpath, options); InMemoryAliasMap aliasMap = new InMemoryAliasMap(levelDBpath.toURI(), levelDb, blockPoolID); aliasMap.setConf(conf); return aliasMap; } @VisibleForTesting InMemoryAliasMap(URI aliasMapURI, DB levelDb, String blockPoolID) { this.aliasMapURI = aliasMapURI; this.levelDb = levelDb; this.blockPoolID = blockPoolID; } @Override public IterationResult list(Optional<Block> marker) throws IOException { try (DBIterator iterator = levelDb.iterator()) { Integer batchSize = conf.getInt(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_INMEMORY_BATCH_SIZE, DFSConfigKeys.DFS_PROVIDED_ALIASMAP_INMEMORY_BATCH_SIZE_DEFAULT); if (marker.isPresent()) { iterator.seek(toProtoBufBytes(marker.get())); } else { iterator.seekToFirst(); } int i = 0; ArrayList<FileRegion> batch = Lists.newArrayListWithExpectedSize(batchSize); while (iterator.hasNext() && i < batchSize) { Map.Entry<byte[], byte[]> entry = iterator.next(); Block block = fromBlockBytes(entry.getKey()); ProvidedStorageLocation providedStorageLocation = fromProvidedStorageLocationBytes(entry.getValue()); batch.add(new FileRegion(block, providedStorageLocation)); ++i; } if (iterator.hasNext()) { Block nextMarker = fromBlockBytes(iterator.next().getKey()); return new IterationResult(batch, Optional.of(nextMarker)); } else { return new IterationResult(batch, Optional.empty()); } } } public @Nonnull Optional<ProvidedStorageLocation> read(@Nonnull Block block) throws IOException { byte[] extendedBlockDbFormat = toProtoBufBytes(block); byte[] providedStorageLocationDbFormat = levelDb.get(extendedBlockDbFormat); if (providedStorageLocationDbFormat == null) { return Optional.empty(); } else { ProvidedStorageLocation providedStorageLocation = 
fromProvidedStorageLocationBytes(providedStorageLocationDbFormat); return Optional.of(providedStorageLocation); } } public void write(@Nonnull Block block, @Nonnull ProvidedStorageLocation providedStorageLocation) throws IOException { byte[] extendedBlockDbFormat = toProtoBufBytes(block); byte[] providedStorageLocationDbFormat = toProtoBufBytes(providedStorageLocation); levelDb.put(extendedBlockDbFormat, providedStorageLocationDbFormat); } @Override public String getBlockPoolId() { return blockPoolID; } public void close() throws IOException { levelDb.close(); } @Nonnull public static ProvidedStorageLocation fromProvidedStorageLocationBytes( @Nonnull byte[] providedStorageLocationDbFormat) throws InvalidProtocolBufferException { ProvidedStorageLocationProto providedStorageLocationProto = ProvidedStorageLocationProto .parseFrom(providedStorageLocationDbFormat); return PBHelperClient.convert(providedStorageLocationProto); } @Nonnull public static Block fromBlockBytes(@Nonnull byte[] blockDbFormat) throws InvalidProtocolBufferException { BlockProto blockProto = BlockProto.parseFrom(blockDbFormat); return PBHelperClient.convert(blockProto); } public static byte[] toProtoBufBytes(@Nonnull ProvidedStorageLocation providedStorageLocation) throws IOException { ProvidedStorageLocationProto providedStorageLocationProto = PBHelperClient.convert(providedStorageLocation); ByteArrayOutputStream providedStorageLocationOutputStream = new ByteArrayOutputStream(); providedStorageLocationProto.writeTo(providedStorageLocationOutputStream); return providedStorageLocationOutputStream.toByteArray(); } public static byte[] toProtoBufBytes(@Nonnull Block block) throws IOException { BlockProto blockProto = PBHelperClient.convert(block); ByteArrayOutputStream blockOutputStream = new ByteArrayOutputStream(); blockProto.writeTo(blockOutputStream); return blockOutputStream.toByteArray(); } /** * Transfer this aliasmap for bootstrapping standby Namenodes. 
The map is * transferred as a tar.gz archive. This archive needs to be extracted on the * standby Namenode. * * @param response http response. * @param conf configuration to use. * @param aliasMap aliasmap to transfer. * @throws IOException */ public static void transferForBootstrap(HttpServletResponse response, Configuration conf, InMemoryAliasMap aliasMap) throws IOException { File aliasMapSnapshot = null; File compressedAliasMap = null; try { aliasMapSnapshot = createSnapshot(aliasMap); // compress the snapshot that is associated with the // block pool id of the aliasmap. compressedAliasMap = getCompressedAliasMap( new File(aliasMapSnapshot, aliasMap.blockPoolID)); try (FileInputStream fis = new FileInputStream(compressedAliasMap)) { ImageServlet.setVerificationHeadersForGet(response, compressedAliasMap); ImageServlet.setFileNameHeaders(response, compressedAliasMap); // send file DataTransferThrottler throttler = ImageServlet.getThrottlerForBootstrapStandby(conf); TransferFsImage.copyFileToStream(response.getOutputStream(), compressedAliasMap, fis, throttler); } } finally { // cleanup the temporary snapshot and compressed files. StringBuilder errMessage = new StringBuilder(); if (compressedAliasMap != null && !FileUtil.fullyDelete(compressedAliasMap)) { errMessage.append("Failed to fully delete compressed aliasmap ") .append(compressedAliasMap.getAbsolutePath()).append("\n"); } if (aliasMapSnapshot != null && !FileUtil.fullyDelete(aliasMapSnapshot)) { errMessage.append("Failed to fully delete the aliasmap snapshot ") .append(aliasMapSnapshot.getAbsolutePath()).append("\n"); } if (errMessage.length() > 0) { throw new IOException(errMessage.toString()); } } } /** * Create a new LevelDB store which is a snapshot copy of the original * aliasmap. * * @param aliasMap original aliasmap. * @return the {@link File} where the snapshot is created. 
* @throws IOException */ static File createSnapshot(InMemoryAliasMap aliasMap) throws IOException { File originalAliasMapDir = new File(aliasMap.aliasMapURI); String bpid = originalAliasMapDir.getName(); File snapshotDir = new File(originalAliasMapDir.getParent(), SNAPSHOT_COPY_DIR); File newLevelDBDir = new File(snapshotDir, bpid); if (!newLevelDBDir.mkdirs()) { throw new IOException( "Unable to create aliasmap snapshot directory " + newLevelDBDir); } // get a snapshot for the original DB. DB originalDB = aliasMap.levelDb; try (Snapshot snapshot = originalDB.getSnapshot()) { // create a new DB for the snapshot and copy all K,V pairs. Options options = new Options(); options.createIfMissing(true); try (DB snapshotDB = JniDBFactory.factory.open(newLevelDBDir, options)) { try (DBIterator iterator = originalDB.iterator(new ReadOptions().snapshot(snapshot))) { iterator.seekToFirst(); while (iterator.hasNext()) { Map.Entry<byte[], byte[]> entry = iterator.next(); snapshotDB.put(entry.getKey(), entry.getValue()); } } } } return snapshotDir; } /** * Compress the given aliasmap directory as tar.gz. * * @return a reference to the compressed aliasmap. * @throws IOException */ private static File getCompressedAliasMap(File aliasMapDir) throws IOException { File outCompressedFile = new File(aliasMapDir.getParent(), TAR_NAME); try (BufferedOutputStream bOut = new BufferedOutputStream( Files.newOutputStream(outCompressedFile.toPath())); GzipCompressorOutputStream gzOut = new GzipCompressorOutputStream(bOut); TarArchiveOutputStream tOut = new TarArchiveOutputStream(gzOut)){ addFileToTarGzRecursively(tOut, aliasMapDir, "", new Configuration()); } return outCompressedFile; } /** * Add all contents of the given file to the archive. * * @param tOut archive to use. * @param file file to archive. * @param prefix path prefix. 
* @throws IOException */ private static void addFileToTarGzRecursively(TarArchiveOutputStream tOut, File file, String prefix, Configuration conf) throws IOException { String entryName = prefix + file.getName(); TarArchiveEntry tarEntry = new TarArchiveEntry(file, entryName); tOut.putArchiveEntry(tarEntry); LOG.debug("Adding entry {} to alias map archive", entryName); if (file.isFile()) { try (FileInputStream in = new FileInputStream(file)) { IOUtils.copyBytes(in, tOut, conf, false); } tOut.closeArchiveEntry(); } else { tOut.closeArchiveEntry(); File[] children = file.listFiles(); if (children != null) { for (File child : children) { // skip the LOCK file if (!child.getName().equals("LOCK")) { addFileToTarGzRecursively(tOut, child, entryName + "/", conf); } } } } } /** * Extract the aliasmap archive to complete the bootstrap process. This method * has to be called after the aliasmap archive is transfered from the primary * Namenode. * * @param aliasMap location of the aliasmap. * @throws IOException */ public static void completeBootstrapTransfer(File aliasMap) throws IOException { File tarname = new File(aliasMap, TAR_NAME); if (!tarname.exists()) { throw new IOException( "Aliasmap archive (" + tarname + ") does not exist"); } try { FileUtil.unTar(tarname, aliasMap); } finally { // delete the archive. if(!FileUtil.fullyDelete(tarname)) { LOG.warn("Failed to fully delete aliasmap archive: " + tarname); } } } /** * CheckedFunction is akin to {@link java.util.function.Function} but * specifies an IOException. * @param <T1> First argument type. * @param <T2> Second argument type. * @param <R> Return type. */ @FunctionalInterface public interface CheckedFunction2<T1, T2, R> { R apply(T1 t1, T2 t2) throws IOException; } }
apache-2.0
anton415/Job4j
Intern/Part_3_Collections/src/main/java/ru/aserdyuchenko/tracker/start/ConsoleInput.java
3380
package ru.aserdyuchenko.tracker.start;

import java.util.*;
import ru.aserdyuchenko.tracker.models.Task;
import ru.aserdyuchenko.tracker.models.Item;

/**
 * Console-driven input handler for the task tracker. Reads menu choices and
 * item fields from {@code System.in} and applies them to the shared
 * {@code Tracker} instance.
 */
public class ConsoleInput implements Input {
    Tracker tracker = new Tracker();
    private Scanner scanner = new Scanner(System.in);
    // NOTE(review): these two fields are never read or written by this class;
    // they are kept only because they are package-visible and removing them
    // could break other code in the package — confirm and delete if unused.
    String name;
    String description;

    /** Print the menu prompt and return the user's raw choice. */
    public String askMenuChoice(String menuQuestion) {
        System.out.print(menuQuestion);
        return scanner.nextLine();
    }

    /** Prompt for and return the name of a new item. */
    public String askNameForNewItem(String nameForNewItem) {
        System.out.print(nameForNewItem);
        return scanner.nextLine();
    }

    /** Prompt for and return the description of a new item. */
    public String askDescForNewItem(String descForNewItem) {
        System.out.print(descForNewItem);
        return scanner.nextLine();
    }

    /** Prompt for and return an item id. */
    public String askItemId(String itemId) {
        System.out.print(itemId);
        return scanner.nextLine();
    }

    /**
     * Prompt for a name and description and add a new task to the tracker.
     * Uses {@code this} for the prompts: the original code built a fresh
     * {@code new ConsoleInput()} per prompt, which also created a new
     * Tracker and a second Scanner over System.in.
     */
    public void addNewItem() {
        System.out.println("Your choice is: Adding a new application.");
        String name = this.askNameForNewItem("Enter name for new item: ");
        String description = this.askDescForNewItem("Enter desc for new item: ");
        tracker.add(new Task(name, description));
        System.out.println("Add new item.");
    }

    /** Prompt for an id plus new fields and update the matching item. */
    public void editItem() {
        System.out.println("Your choice is: Edit applications.");
        String itemId = this.askItemId("Enter itemId for editing item: ");
        String name = this.askNameForNewItem("Enter new name for item: ");
        String desc = this.askDescForNewItem("Enter new desc for item: ");
        tracker.updateItem(itemId, new Task(name, desc));
        System.out.println("Item is edit.");
    }

    /** Prompt for an id and remove the matching item from the tracker. */
    public void removeItem() {
        System.out.println("Your choice is: Removing the application.");
        String itemId = this.askItemId("Enter itemId for removing item: ");
        tracker.delete(itemId);
        System.out.println("Item is removing.");
    }

    /** Print every item in the tracker; removed slots show as null. */
    public void displaysAListOfAllItem() {
        System.out.println("Your choice is: Displays a list of all applications.");
        for (Item item : tracker.getAll()) {
            if (item != null) {
                System.out.println("Item Id: " + item.getId());
                System.out.println("Name: " + item.getName());
                System.out.println("Desc: " + item.getDescription());
            } else {
                // slot of an item that was already removed
                System.out.println("Now, this item is null");
            }
        }
        System.out.println("Displays a list of all applications.");
    }

    /**
     * Dispatch the given menu choice and return it unchanged.
     *
     * Bug fix: the original compared strings with {@code ==} (object
     * identity), so input read from the console never matched any branch and
     * every choice fell through to the error message. A string switch uses
     * {@code equals} semantics.
     */
    public String menuChoice(String choice) {
        switch (choice) {
            case "0":
                // Add new item
                addNewItem();
                break;
            case "1":
                // Edit item
                editItem();
                break;
            case "2":
                // Remove item
                removeItem();
                break;
            case "3":
                // Displays a list of all item
                displaysAListOfAllItem();
                break;
            case "4":
                // Exit (message text kept as-is, including the "Exite" typo,
                // to preserve observable output — fix separately if desired)
                System.out.println("Your choice is: Exite.");
                break;
            default:
                // anything outside the menu range
                System.out.println("Your choice is incorrect, please enter number from 1 to 4!");
        }
        return choice;
    }

    /**
     * Ask a menu question and parse the answer as an int.
     *
     * @param question prompt to display.
     * @param range allowed values.
     * @return the chosen value if it is in {@code range}.
     * @throws MenuOutException if the value is outside the allowed range.
     * @throws NumberFormatException if the input is not a valid integer.
     */
    public int askMenuChoice(String question, int[] range) {
        // parseInt avoids the needless boxing of Integer.valueOf
        int key = Integer.parseInt(this.askMenuChoice(question));
        boolean exist = false;
        for (int value : range) {
            if (value == key) {
                exist = true;
                break;
            }
        }
        if (exist) {
            return key;
        } else {
            throw new MenuOutException("Out of menu range.");
        }
    }
}
apache-2.0
FlakyTestDetection/jackrabbit-oak
oak-store-document/src/test/java/com/mongodb/OakFongo.java
8641
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.mongodb;

import java.lang.reflect.Field;
import java.util.List;
import java.util.Map;

import com.github.fakemongo.Fongo;
import com.mongodb.connection.ServerVersion;

/**
 * A {@link Fongo} subclass that lets tests observe and intercept collection
 * operations. It injects its own DB/collection implementations by reading
 * Fongo's private {@code dbMap}/{@code collMap} fields via reflection, then
 * routes every insert/update/remove/find/findAndModify/bulk-write through
 * overridable before/after hook methods (no-ops by default).
 *
 * NOTE(review): lives in package {@code com.mongodb} to reach
 * package-private members such as {@code FongoDBCollection.executeBulkWriteOperation}.
 */
public class OakFongo extends Fongo {

  // Fongo's private name->DB registry, obtained reflectively in the ctor.
  private final Map<String, FongoDB> dbMap;

  public OakFongo(String name) throws Exception {
    super(name);
    this.dbMap = getDBMap();
  }

  /**
   * Returns the DB with the given name, creating an {@link OakFongoDB}
   * (rather than a plain FongoDB) on first access so that collections are
   * instrumented. Synchronizes on the shared map, mirroring Fongo's own
   * access pattern.
   */
  @Override
  public FongoDB getDB(String dbname) {
    synchronized (dbMap) {
      FongoDB fongoDb = dbMap.get(dbname);
      if (fongoDb == null) {
        try {
          fongoDb = new OakFongoDB(this, dbname);
        } catch (Exception e) {
          // preserve the cause so reflection failures remain diagnosable
          throw new MongoException(e.getMessage(), e);
        }
        dbMap.put(dbname, fongoDb);
      }
      return fongoDb;
    }
  }

  // Reflectively grab Fongo's private dbMap so getDB() can share it.
  @SuppressWarnings("unchecked")
  private Map<String, FongoDB> getDBMap() throws Exception {
    Field f = Fongo.class.getDeclaredField("dbMap");
    f.setAccessible(true);
    return (Map<String, FongoDB>) f.get(this);
  }

  // ------------------------------------------------------------------
  // Test hooks: subclasses override any of these to observe or mutate
  // arguments/results around the corresponding collection operation.
  // All are intentional no-ops here.
  // ------------------------------------------------------------------

  protected void beforeInsert(List<? extends DBObject> documents,
                              InsertOptions insertOptions) {}

  protected void afterInsert(WriteResult result) {}

  protected void beforeFindAndModify(DBObject query,
                                     DBObject fields,
                                     DBObject sort,
                                     boolean remove,
                                     DBObject update,
                                     boolean returnNew,
                                     boolean upsert) {}

  protected void afterFindAndModify(DBObject result) {}

  protected void beforeUpdate(DBObject q,
                              DBObject o,
                              boolean upsert,
                              boolean multi,
                              WriteConcern concern,
                              DBEncoder encoder) {}

  protected void afterUpdate(WriteResult result) {}

  protected void beforeRemove(DBObject query, WriteConcern writeConcern) {}

  protected void afterRemove(WriteResult result) {}

  protected void beforeExecuteBulkWriteOperation(boolean ordered,
                                                 Boolean bypassDocumentValidation,
                                                 List<?> writeRequests,
                                                 WriteConcern aWriteConcern) {}

  protected void beforeFind(DBObject query, DBObject projection) {}

  protected void afterFind(DBCursor cursor) {}

  protected void afterExecuteBulkWriteOperation(BulkWriteResult result) {}

  /**
   * FongoDB variant that hands out {@link OakFongoDBCollection}s and
   * answers {@code serverStatus} commands with the fake server version.
   */
  private class OakFongoDB extends FongoDB {

    // FongoDB's private name->collection registry, obtained reflectively.
    private final Map<String, FongoDBCollection> collMap;

    public OakFongoDB(Fongo fongo, String name) throws Exception {
      super(fongo, name);
      this.collMap = getCollMap();
    }

    @SuppressWarnings("unchecked")
    private Map<String, FongoDBCollection> getCollMap() throws Exception {
      Field f = FongoDB.class.getDeclaredField("collMap");
      f.setAccessible(true);
      return (Map<String, FongoDBCollection>) f.get(this);
    }

    /**
     * Intercepts {@code serverStatus} to report the fake server's version
     * (which Fongo itself does not answer); everything else is delegated.
     */
    @Override
    public CommandResult command(DBObject cmd,
                                 ReadPreference readPreference,
                                 DBEncoder encoder) {
      if (cmd.containsField("serverStatus")) {
        CommandResult commandResult = okResult();
        commandResult.append("version", asString(getServerVersion()));
        return commandResult;
      } else {
        return super.command(cmd, readPreference, encoder);
      }
    }

    /**
     * Returns the collection with the given name, creating an instrumented
     * {@link OakFongoDBCollection} on first access. System collections are
     * left to the default implementation.
     */
    @Override
    public synchronized FongoDBCollection doGetCollection(String name,
                                                          boolean idIsNotUniq) {
      if (name.startsWith("system.")) {
        return super.doGetCollection(name, idIsNotUniq);
      }
      FongoDBCollection coll = collMap.get(name);
      if (coll == null) {
        coll = new OakFongoDBCollection(this, name, idIsNotUniq);
        collMap.put(name, coll);
      }
      return coll;
    }

    // Render e.g. [3, 4, 0] as "3.4.0".
    private String asString(ServerVersion serverVersion) {
      StringBuilder sb = new StringBuilder();
      for (int i : serverVersion.getVersionList()) {
        if (sb.length() != 0) {
          sb.append('.');
        }
        sb.append(String.valueOf(i));
      }
      return sb.toString();
    }
  }

  /**
   * FongoDBCollection variant that wraps every operation in the enclosing
   * OakFongo's before/after hooks, delegating the actual work to Fongo.
   */
  private class OakFongoDBCollection extends FongoDBCollection {

    public OakFongoDBCollection(FongoDB db, String name, boolean idIsNotUniq) {
      super(db, name, idIsNotUniq);
    }

    @Override
    public WriteResult insert(List<? extends DBObject> documents,
                              InsertOptions insertOptions) {
      beforeInsert(documents, insertOptions);
      WriteResult result = super.insert(documents, insertOptions);
      afterInsert(result);
      return result;
    }

    @Override
    public WriteResult remove(DBObject query, WriteConcern writeConcern) {
      beforeRemove(query, writeConcern);
      WriteResult result = super.remove(query, writeConcern);
      afterRemove(result);
      return result;
    }

    @Override
    public WriteResult update(DBObject q,
                              DBObject o,
                              boolean upsert,
                              boolean multi,
                              WriteConcern concern,
                              DBEncoder encoder) {
      beforeUpdate(q, o, upsert, multi, concern, encoder);
      WriteResult result = super.update(q, o, upsert, multi, concern, encoder);
      afterUpdate(result);
      return result;
    }

    @Override
    public DBObject findAndModify(DBObject query,
                                  DBObject fields,
                                  DBObject sort,
                                  boolean remove,
                                  DBObject update,
                                  boolean returnNew,
                                  boolean upsert) {
      beforeFindAndModify(query, fields, sort, remove, update, returnNew, upsert);
      DBObject result = super.findAndModify(
          query, fields, sort, remove, update, returnNew, upsert);
      afterFindAndModify(result);
      return result;
    }

    // package-private override: reachable only because this class lives in
    // the com.mongodb package.
    @Override
    BulkWriteResult executeBulkWriteOperation(boolean ordered,
                                              Boolean bypassDocumentValidation,
                                              List<WriteRequest> writeRequests,
                                              WriteConcern aWriteConcern) {
      beforeExecuteBulkWriteOperation(
          ordered, bypassDocumentValidation, writeRequests, aWriteConcern);
      BulkWriteResult result = super.executeBulkWriteOperation(
          ordered, bypassDocumentValidation, writeRequests, aWriteConcern);
      afterExecuteBulkWriteOperation(result);
      return result;
    }

    @Override
    public DBCursor find(DBObject query, DBObject projection) {
      beforeFind(query, projection);
      DBCursor result = super.find(query, projection);
      afterFind(result);
      return result;
    }
  }
}
apache-2.0